repo stringlengths 7 90 | file_url stringlengths 81 315 | file_path stringlengths 4 228 | content stringlengths 0 32.8k | language stringclasses 1
value | license stringclasses 7
values | commit_sha stringlengths 40 40 | retrieved_at stringdate 2026-01-04 14:38:15 2026-01-05 02:33:18 | truncated bool 2
classes |
|---|---|---|---|---|---|---|---|---|
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/bloodhound_multiproduct/tests/functional/product.py | bloodhound_multiproduct/tests/functional/product.py | # -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from uuid import uuid4
from trac.tests.functional import FunctionalTwillTestCaseSetup
from trac.tests.functional.tester import tc
from tests.functional import MultiproductFunctionalTestCase
#----------------
# Functional test cases for products
#----------------
class TestNewProduct(FunctionalTwillTestCaseSetup):
    def runTest(self):
        """Setup new product"""
        prefix, name = self._tester.create_product()
        href = self._testenv.get_env_href(prefix=prefix)
        tc.url(href())
        # The wiki index of the freshly created product environment must be
        # reachable and list the default wiki pages.
        tc.follow('Index')
        tc.find('Index by Title')
        tc.find('<a[^>]*>Index by Date')
        for wiki_page in ('TitleIndex', 'RecentChanges',
                          'InterTrac', 'InterWiki'):
            tc.find('<a[^>]*>%s' % (wiki_page,))
        # Switch to the date-ordered index and verify the back link exists.
        tc.follow('Index by Date')
        tc.find('Index by Date')
        tc.find('<a[^>]*>Index by Title')
class TestProductRenameAuthor(MultiproductFunctionalTestCase,
                              FunctionalTwillTestCaseSetup):
    def runTest(self):
        """Check for correct author in ticket comments on product rename
        https://issues.apache.org/bloodhound/ticket/671
        """
        # Create a product and two tickets that will receive the
        # auto-generated "renamed" comment.
        prefix, name = self._tester.create_product()
        with self.in_product(prefix) as (testenv, tester):
            t1 = tester.create_ticket()
            t2 = tester.create_ticket()
        # Rename the product via the admin web UI; the uuid4-derived suffix
        # keeps the new name unique across test runs.
        new_name = '%s%s' % (name, str(uuid4()).split('-')[0])
        tc.go(self._tester.url + '/products')
        tc.follow('.*/products/' + prefix + r'\?action=edit$')
        tc.find('Edit Product')
        tc.find(prefix)
        tc.formvalue('edit', 'name', new_name)
        tc.submit()
        tc.find('Your changes have been saved')
        # Both tickets must now carry a comment documenting the rename.
        with self.in_product(prefix) as (testenv, tester):
            tester.go_to_ticket(t1)
            comment = 'Product %s renamed to %s' % (name, new_name)
            tc.find(comment)
            tester.go_to_ticket(t2)
            tc.find(comment)
def functionalSuite(suite=None):
    """Add the product functional tests to *suite*.

    When no suite is given (or an empty one), the base functional suite is
    created first so the environment fixtures are set up.
    """
    if not suite:
        import tests.functional
        suite = tests.functional.functionalSuite()
    for testcase in (TestNewProduct(), TestProductRenameAuthor()):
        suite.addTest(testcase)
    return suite


if __name__ == '__main__':
    import unittest
    unittest.main(defaultTest='functionalSuite')
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/bloodhound_multiproduct/tests/mimeview/api.py | bloodhound_multiproduct/tests/mimeview/api.py | # -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Tests for Apache(TM) Bloodhound's ticket reports in product environments"""
import unittest
from trac.mimeview.tests.api import MimeviewTestCase
from multiproduct.env import ProductEnvironment
from tests.env import MultiproductTestCase
class ProductMimeviewTestCase(MimeviewTestCase, MultiproductTestCase):
    @property
    def env(self):
        """Product environment used by the inherited tests, built lazily.

        On first access this sets up the global stub environment with the
        upstream converter components enabled, upgrades it for
        multi-product support and wraps it in a `ProductEnvironment` for
        the default product.
        """
        env = getattr(self, '_env', None)
        if env is None:
            self.global_env = self._setup_test_env(
                enable=['%s.%s' % (MimeviewTestCase.__module__, c)
                        for c in ['Converter0', 'Converter1', 'Converter2']]
            )
            self._upgrade_mp(self.global_env)
            self._setup_test_log(self.global_env)
            self._load_product_from_data(self.global_env, self.default_product)
            self._env = env = ProductEnvironment(
                self.global_env, self.default_product)
        return env

    @env.setter
    def env(self, value):
        # Assignments made by the base class setUp() are intentionally
        # ignored; the getter above is the single source of truth.
        pass

    def tearDown(self):
        self.global_env.reset_db()
        self.global_env = self._env = None
def test_suite():
    """Return the suite for the MIME view API product tests."""
    suite = unittest.TestSuite()
    # TODO : Put MIME API doctests in product context
    # doctest.DocTestSuite(trac.mimeview.api)
    suite.addTest(unittest.makeSuite(ProductMimeviewTestCase, 'test'))
    return suite


if __name__ == '__main__':
    unittest.main(defaultTest='test_suite')
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/bloodhound_multiproduct/tests/mimeview/patch.py | bloodhound_multiproduct/tests/mimeview/patch.py | # -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Tests for Apache(TM) Bloodhound's patch renderer in product environments"""
import unittest
from trac.mimeview.api import Mimeview
from trac.mimeview.patch import PatchRenderer
from trac.mimeview.tests.patch import PatchRendererTestCase
from trac.web.chrome import Chrome
from multiproduct.env import ProductEnvironment
from tests.env import MultiproductTestCase
class ProductPatchRendererTestCase(PatchRendererTestCase, MultiproductTestCase):
    """Run the upstream patch renderer tests in a product environment."""

    @property
    def env(self):
        """Product environment, created on first access."""
        if getattr(self, '_env', None) is None:
            self.global_env = self._setup_test_env(
                enable=[Chrome, PatchRenderer]
            )
            self._upgrade_mp(self.global_env)
            self._setup_test_log(self.global_env)
            self._load_product_from_data(self.global_env,
                                         self.default_product)
            self._env = ProductEnvironment(self.global_env,
                                           self.default_product)
        return self._env

    @env.setter
    def env(self, value):
        # The environment is derived in the getter; discard assignments.
        pass

    def setUp(self):
        PatchRendererTestCase.setUp(self)
        # Re-fetch the renderer from the product-scoped Mimeview component.
        self.patch = Mimeview(self.env).renderers[0]

    def tearDown(self):
        self.global_env.reset_db()
        self.global_env = self._env = None
def test_suite():
    """Return the suite for the patch renderer product tests."""
    suite = unittest.TestSuite()
    suite.addTest(unittest.makeSuite(ProductPatchRendererTestCase, 'test'))
    return suite


if __name__ == '__main__':
    unittest.main(defaultTest='test_suite')
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/bloodhound_multiproduct/tests/mimeview/pygments.py | bloodhound_multiproduct/tests/mimeview/pygments.py | # -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Tests for Apache(TM) Bloodhound's Pygments renderer in product environments"""
from tests import unittest
from tests.env import MultiproductTestCase
from trac.mimeview.api import Mimeview
from trac.mimeview.pygments import PygmentsRenderer
from trac.mimeview.tests import pygments as test_pygments
from trac.web.chrome import Chrome
from multiproduct.env import ProductEnvironment
# Mirror the availability flag from the upstream test module so importers of
# this module see the real value (it was previously hard-coded to False even
# when pygments was installed).
have_pygments = test_pygments.have_pygments

if test_pygments.have_pygments:
    # Pygments available: reuse the upstream test case as the base class.
    super_class = test_pygments.PygmentsRendererTestCase
else:
    # Pygments missing: provide a stub base with no-op test methods so the
    # subclass below can still be defined (it is skipped at runtime anyway).
    class super_class(object):
        test_empty_content = test_extra_mimetypes = test_newline_content = \
            test_python_hello = test_python_hello_mimeview = \
            lambda self: None
class ProductPygmentsRendererTestCase(super_class, MultiproductTestCase):
    # Runs the upstream Pygments renderer tests against a product
    # environment; when pygments is unavailable the base is a no-op stub
    # and the whole class is skipped below.

    @property
    def env(self):
        """Product environment, lazily created on first access."""
        env = getattr(self, '_env', None)
        if env is None:
            self.global_env = self._setup_test_env(
                enable=[Chrome, PygmentsRenderer]
            )
            self._upgrade_mp(self.global_env)
            self._setup_test_log(self.global_env)
            self._load_product_from_data(self.global_env, self.default_product)
            self._env = env = ProductEnvironment(
                self.global_env, self.default_product)
        return env

    @env.setter
    def env(self, value):
        # Assignments made by base-class setUp() are intentionally ignored.
        pass

    def setUp(self):
        test_pygments.PygmentsRendererTestCase.setUp(self)
        # Re-fetch the renderer from the product-scoped Mimeview component.
        self.pygments = Mimeview(self.env).renderers[0]

    def tearDown(self):
        self.global_env.reset_db()
        self.global_env = self._env = None
# Skip the whole test case when Pygments is not installed.
_skip_unless_pygments = unittest.skipUnless(
    test_pygments.have_pygments,
    'mimeview/tests/pygments (no pygments installed)')
ProductPygmentsRendererTestCase = \
    _skip_unless_pygments(ProductPygmentsRendererTestCase)


def test_suite():
    """Return the suite for the Pygments renderer product tests."""
    suite = unittest.TestSuite()
    suite.addTest(unittest.makeSuite(ProductPygmentsRendererTestCase, 'test'))
    return suite


if __name__ == '__main__':
    unittest.main(defaultTest='test_suite')
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/bloodhound_multiproduct/tests/mimeview/__init__.py | bloodhound_multiproduct/tests/mimeview/__init__.py | # -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import unittest
from tests import TestLoader
def test_suite():
    """Discover every test module in this package."""
    loader = TestLoader()
    return loader.discover_package(__package__, pattern='*.py')


if __name__ == '__main__':
    unittest.main(defaultTest='test_suite')
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/bloodhound_multiproduct/tests/web/wikisyntax.py | bloodhound_multiproduct/tests/web/wikisyntax.py | # -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Tests for inherited Apache(TM) Bloodhound htdocs wiki syntax
in product environments"""
import os.path
import re
import unittest
from trac.web.tests import wikisyntax
from multiproduct.env import ProductEnvironment
from tests.env import MultiproductTestCase
from tests.wiki import formatter
def test_suite():
    """Build the wiki-syntax suite from the upstream test cases."""
    cases = wikisyntax.TEST_CASES
    return formatter.test_suite(cases, file=wikisyntax.__file__)


if __name__ == '__main__':
    unittest.main(defaultTest='test_suite')
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/bloodhound_multiproduct/tests/web/chrome.py | bloodhound_multiproduct/tests/web/chrome.py | # -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Tests for Apache(TM) Bloodhound web chrome API in product environments"""
import unittest
from trac.web.tests.chrome import ChromeTestCase
from multiproduct.env import ProductEnvironment
from tests.env import MultiproductTestCase
class ProductChromeTestCase(ChromeTestCase, MultiproductTestCase):
    """Run the upstream web chrome tests in a product environment."""

    @property
    def env(self):
        """Product environment wrapping a lazily created global stub."""
        if getattr(self, '_env', None) is None:
            self.global_env = self._setup_test_env()
            self._upgrade_mp(self.global_env)
            self._setup_test_log(self.global_env)
            self._load_product_from_data(self.global_env,
                                         self.default_product)
            self._env = ProductEnvironment(self.global_env,
                                           self.default_product)
        return self._env

    @env.setter
    def env(self, value):
        # Assignments from the base class are ignored on purpose; the
        # getter above is the single source of truth.
        pass

    def setUp(self):
        # Instantiate environment stub before clearing ComponentMeta._registry
        self.env
        ChromeTestCase.setUp(self)

    def tearDown(self):
        try:
            # `global_env` only exists once the `env` property has been
            # accessed, hence the hasattr() guard.
            if hasattr(self, 'global_env'):
                self.global_env.reset_db()
                self.global_env = self._env = None
        finally:
            ChromeTestCase.tearDown(self)
def test_suite():
    """Return the chrome test cases wrapped for product environments."""
    chrome_suite = unittest.makeSuite(ProductChromeTestCase, 'test')
    return chrome_suite


if __name__ == '__main__':
    unittest.main(defaultTest='test_suite')
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/bloodhound_multiproduct/tests/web/__init__.py | bloodhound_multiproduct/tests/web/__init__.py | # -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import unittest
from tests import TestLoader
def test_suite():
    """Discover every test module below this package."""
    package_loader = TestLoader()
    return package_loader.discover_package(__package__, pattern='*.py')


if __name__ == '__main__':
    unittest.main(defaultTest='test_suite')
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/bloodhound_multiproduct/tests/web/session.py | bloodhound_multiproduct/tests/web/session.py | # -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Tests for Apache(TM) Bloodhound's ticket reports in product environments"""
import unittest
from trac.web.tests.session import SessionTestCase
from multiproduct.env import ProductEnvironment
from tests.env import MultiproductTestCase
class ProductSessionTestCase(SessionTestCase, MultiproductTestCase):
    """Run the upstream session tests against a product environment."""

    @property
    def env(self):
        """Product environment, created lazily on first access."""
        if getattr(self, '_env', None) is None:
            self.global_env = self._setup_test_env()
            self._upgrade_mp(self.global_env)
            self._setup_test_log(self.global_env)
            self._load_product_from_data(self.global_env,
                                         self.default_product)
            self._env = ProductEnvironment(self.global_env,
                                           self.default_product)
        return self._env

    @env.setter
    def env(self, value):
        # The environment is derived in the getter; ignore assignments.
        pass

    def tearDown(self):
        self.global_env.reset_db()
        self.global_env = self._env = None
def test_suite():
    """Return the suite for the session product tests."""
    suite = unittest.TestSuite()
    suite.addTest(unittest.makeSuite(ProductSessionTestCase, 'test'))
    return suite


if __name__ == '__main__':
    unittest.main(defaultTest='test_suite')
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/bloodhound_multiproduct/multiproduct/api.py | bloodhound_multiproduct/multiproduct/api.py | # -*- coding: UTF-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import copy
import os
import shutil
from genshi.builder import tag, Element
from genshi.core import escape, Markup, unescape
from pkg_resources import resource_filename
from trac.attachment import Attachment
from trac.config import Option, PathOption
from trac.core import Component, TracError, implements, Interface
from trac.db import Table, Column, DatabaseManager, Index
import trac.db_default
from trac.env import IEnvironmentSetupParticipant, Environment
from trac.perm import IPermissionRequestor, PermissionCache
from trac.resource import IExternalResourceConnector, IResourceChangeListener,\
IResourceManager, ResourceNotFound
from trac.ticket.api import ITicketFieldProvider, ITicketManipulator
from trac.util.text import to_unicode, unquote_label, unicode_unquote
from trac.web.chrome import ITemplateProvider, add_warning
from trac.web.main import FakePerm, FakeSession
from trac.wiki.admin import WikiAdmin
from trac.wiki.api import IWikiSyntaxProvider
from trac.wiki.parser import WikiParser
from multiproduct.dbcursor import GLOBAL_PRODUCT
from multiproduct.model import Product, ProductResourceMap, ProductSetting
from multiproduct.util import EmbeddedLinkFormatter, IDENTIFIER, \
using_mysql_backend, using_sqlite_backend
from multiproduct.util.translation import _, N_, add_domain
__all__ = ['MultiProductSystem', 'PRODUCT_SYNTAX_DELIMITER']
DB_VERSION = 5
DB_SYSTEM_KEY = 'bloodhound_multi_product_version'
PLUGIN_NAME = 'Bloodhound multi product'
class ISupportMultiProductEnvironment(Interface):
    """Extension point interface for components that are aware of the
    multi-product environment and its specifics.

    A component implementing this interface is handled in a special way in
    the following scenarios:

    * if it implements the `IEnvironmentSetupParticipant` interface, the
      component will only be invoked once per global environment
      creation/upgrade.  It is up to the component to install/update its
      environment specifics (schema, possibly files, etc.) for all
      products.  In contrast, components that don't implement
      `ISupportMultiProductEnvironment` will be invoked once per product
      environment during install/update.
    """
    pass
class MultiProductSystem(Component):
    """Creates the database tables and template directories"""

    implements(IEnvironmentSetupParticipant, IExternalResourceConnector,
               IPermissionRequestor, IResourceChangeListener, IResourceManager,
               ISupportMultiProductEnvironment, ITemplateProvider,
               ITicketFieldProvider, IWikiSyntaxProvider, ITicketManipulator)

    # Prefix assigned to the catch-all product created when a
    # single-product installation is migrated (see _insert_default_product).
    default_product_prefix = Option(
        'multiproduct',
        'default_product_prefix',
        default='@',
        doc="""Prefix used for default product when migrating single-product
        installations to multi-product.""", doc_domain='multiproduct')

    default_product = Option('ticket', 'default_product', '',
        """Default product for newly created tickets.""")

    product_base_url = Option('multiproduct', 'product_base_url', '',
        """A pattern used to generate the base URL of product environments,
        e.g. the use cases listed in bh:wiki:/Proposals/BEP-0003#url-mapping .
        Both absolute as well as relative URLs are supported. The later
        will be resolved with respect to the base URL of the parent global
        environment. The pattern may contain references to $(envname)s,
        $(prefix)s and $(name)s placeholders representing the environment name,
        product prefix and product name respectively . If nothing is set the
        following will be used `products/$(prefix)s`

        Note the usage of `$(...)s` instead of `%(...)s` as the later form
        would be interpreted by the ConfigParser itself. """,
        doc_domain='multiproduct')

    product_config_parent = PathOption('inherit', 'multiproduct', '',
        """The path to the configuration file containing the settings shared
        by sibling product environments. By default will inherit
        global environment configuration.
        """, doc_domain='multiproduct')

    # Table schemas introduced by this plugin.
    SCHEMA = [mcls._get_schema()
              for mcls in (Product, ProductResourceMap)]

    # Tables which should be migrated (extended with 'product' column)
    MIGRATE_TABLES = ['component',
                      'milestone',
                      'version',
                      'enum',
                      'permission',
                      'wiki',
                      'report',
                      ]

    # Tables whose global rows are copied into each product scope during
    # upgrade; wiki pages are handled separately.
    PRODUCT_POPULATE_TABLES = list(set(MIGRATE_TABLES) - set(['wiki']))
def __init__(self, *args, **kwargs):
    """Register the plugin's translation catalog on instantiation."""
    import pkg_resources
    locale_dir = pkg_resources.resource_filename(__name__, 'locale')
    # NOTE(review): `self.env` is used before super().__init__ runs —
    # presumably trac's component machinery binds it earlier; confirm.
    add_domain(self.env.path, locale_dir)
    super(MultiProductSystem, self).__init__(*args, **kwargs)
def get_version(self):
    """Finds the current version of the bloodhound database schema"""
    rows = self.env.db_direct_query("""
        SELECT value FROM system WHERE name = %s
        """, (DB_SYSTEM_KEY,))
    if not rows:
        # No system entry: the multiproduct schema was never installed.
        return -1
    return int(rows[0][0])
# IEnvironmentSetupParticipant methods
def environment_created(self):
    """Insertion of any default data into the database."""
    # Lazy %-style logging arguments: the message is only rendered when
    # the DEBUG level is actually enabled.
    self.log.debug("creating environment for %s plugin.", PLUGIN_NAME)
def environment_needs_upgrade(self, db_dummy=None):
    """Detects if the installed db version matches the running system"""
    db_installed_version = self.get_version()
    if db_installed_version > DB_VERSION:
        # Refuse to run against a database written by a newer plugin.
        raise TracError('''Current db version (%d) newer than supported by
this version of the %s (%d).''' % (db_installed_version,
                                   PLUGIN_NAME,
                                   DB_VERSION))
    needs_upgrade = db_installed_version < DB_VERSION
    if not needs_upgrade:
        # Schema is current: enable multi-product SQL translation now.
        self.env.enable_multiproduct_schema(True)
    return needs_upgrade
def _update_db_version(self, db, version):
    """Record *version* as the multiproduct schema version.

    Updates the existing row in the `system` table when one is present,
    otherwise inserts the initial row.  Returns the version written.
    """
    old_version = self.get_version()
    if old_version != -1:
        # Lazy logging arguments avoid rendering the message when the
        # INFO level is disabled.
        self.log.info("Updating multiproduct database schema from version"
                      " %d to %d", old_version, version)
        db("""UPDATE system SET value=%s
              WHERE name=%s""", (version, DB_SYSTEM_KEY))
    else:
        self.log.info("Initial multiproduct database schema set to "
                      "version %d", version)
        # Use bind parameters for consistency with the UPDATE above and
        # to avoid quoting issues from string interpolation.
        db("""INSERT INTO system (name, value) VALUES (%s, %s)""",
           (DB_SYSTEM_KEY, version))
    return version
# Process-wide cache for the enumerated system wiki page names.
_system_wiki_list = None

@property
def system_wiki_list(self):
    """Names of all 'system' wiki pages, computed once per process."""
    cls = MultiProductSystem
    if cls._system_wiki_list is None:
        cls._system_wiki_list = self._get_system_wiki_list()
    return cls._system_wiki_list
def _get_system_wiki_list(self):
    """Helper function that enumerates all 'system' wikis. The
    list is combined of default wiki pages and pages that are
    bundled with Bloodhound dashboard and search plugins"""
    from bhdashboard import wiki
    paths = [resource_filename('trac.wiki',
                               'default-pages')] + \
            [resource_filename('bhdashboard',
                               'default-pages')] + \
            [resource_filename('bhsearch',
                               'default-pages')]
    pages = []
    original_pages = []
    # Collect raw page names bundled with each plugin; on-disk names are
    # percent-encoded, so unquote them first.
    for path in paths:
        for page in os.listdir(path):
            filename = os.path.join(path, page)
            page = unicode_unquote(page.encode('utf-8'))
            if os.path.isfile(filename):
                original_pages.append(page)
    for original_name in original_pages:
        if original_name.startswith('Trac'):
            # The dashboard plugin may rename Trac* pages; skip names
            # without a replacement or whose replacement already exists.
            new_name = wiki.new_name(original_name)
            if not new_name:
                continue
            if new_name in original_pages:
                continue
            name = new_name
            # original trac wikis should also be included in the list
            pages.append(original_name)
        else:
            name = original_name
        pages.append(name)
    return pages
def upgrade_environment(self, db_dummy=None):
    """Installs or updates tables to current version"""
    self.log.debug("upgrading existing environment for %s plugin." %
                   PLUGIN_NAME)
    db_installed_version = self.get_version()
    with self.env.db_direct_transaction as db:
        # Each step migrates one schema version and records the new
        # version number before the next step runs.
        if db_installed_version < 1:
            # v1: product column on ticket + this plugin's own tables.
            self._add_column_product_to_ticket(db)
            self._create_multiproduct_tables(db)
            db_installed_version = self._update_db_version(db, 1)
        if db_installed_version < 2:
            # v2: ticket.product stores the product prefix, not the name.
            self._replace_product_on_ticket_with_product_prefix(db)
            db_installed_version = self._update_db_version(db, 2)
        if db_installed_version < 3:
            # v3: extend core tables with a 'product' column and move
            # existing data into the default product scope.
            SYSTEM_TABLES = ['system']
            TICKET_TABLES = [
                'ticket_change', 'ticket_custom', 'attachment',
            ]
            table_defs = self._add_product_column_to_tables(
                self.MIGRATE_TABLES + TICKET_TABLES + SYSTEM_TABLES,
                db_installed_version)
            table_columns = self._get_table_columns(table_defs)
            create_temp_table = lambda table: self._create_temp_table(
                db, table, table_columns, table_defs)
            self._insert_default_product(db)
            self._upgrade_tickets(db, TICKET_TABLES, create_temp_table)
            self._upgrade_wikis(db, create_temp_table)
            self._upgrade_system_tables(db, create_temp_table)
            self._soft_link_repositories_to_default_product(db)
            self._upgrade_table_system(SYSTEM_TABLES, create_temp_table, db)
            self._enable_multiproduct_hooks()
            db_installed_version = self._update_db_version(db, 3)
        if db_installed_version < 4:
            # v4: product tables for plugins
            # (see `_create_product_tables_for_plugins`).
            self._create_product_tables_for_plugins(db)
            db_installed_version = self._update_db_version(db, 4)
        if db_installed_version < 5:
            # v5: rebuild the ticket table so its key includes 'product'.
            table_defs = self._add_product_column_to_tables(
                ['ticket'], db_installed_version)
            self._modify_ticket_pk(db, table_defs)
            db_installed_version = self._update_db_version(db, 5)
        self.env.enable_multiproduct_schema(True)
def _add_column_product_to_ticket(self, db):
    """Add the 'product' column to the ticket table (schema v1)."""
    self.log.debug("Adding field product to ticket table")
    db("ALTER TABLE ticket ADD COLUMN product TEXT")
def _create_multiproduct_tables(self, db):
    """Create the plugin's own tables as defined in `SCHEMA`."""
    # Lazy logging arguments: only formatted when DEBUG is enabled.
    self.log.debug("Creating initial db tables for %s plugin.",
                   PLUGIN_NAME)
    db_connector, dummy = DatabaseManager(self.env)._get_connector()
    for table in self.SCHEMA:
        for statement in db_connector.to_sql(table):
            db(statement)
def _replace_product_on_ticket_with_product_prefix(self, db):
    """Rewrite ticket.product values from product name to prefix (v2)."""
    for prod in Product.select(self.env):
        db("""UPDATE ticket SET product=%s
              WHERE product=%s""", (prod.prefix, prod.name))
def _create_temp_table(self, db, table, table_columns, table_defs):
    """creates temporary table with the new schema and
    drops original table

    Returns a `(temp_table_name, cols)` pair where *cols* is the
    comma-separated column list that was copied.
    """
    table_temp_name = '%s_temp' % table
    if table == 'report':
        # Leave out the auto-generated report id so new ids are assigned
        # when rows are re-inserted into the rebuilt table.
        cols = ','.join([c for c in table_columns[table] if c != 'id'])
    else:
        cols = ','.join(table_columns[table])
    self.log.info("Migrating table '%s' to a new schema", table)
    db("""CREATE TABLE %s AS SELECT %s FROM %s""" %
       (table_temp_name, cols, table))
    db("""DROP TABLE %s""" % table)
    # Bind the unused value to `dummy`, not `_`: `_` would shadow the
    # gettext translation helper imported at module level.
    db_connector, dummy = DatabaseManager(self.env)._get_connector()
    table_schema = [t for t in table_defs if t.name == table][0]
    for sql in db_connector.to_sql(table_schema):
        db(sql)
    return table_temp_name, cols
def _drop_temp_table(self, db, table):
    """Drop a temporary migration table made by `_create_temp_table`."""
    db("""DROP TABLE %s""" % table)
def _add_product_column_to_tables(self, tables, current_version):
    """Extend trac default schema by adding product column
    and extending key with product.
    """
    table_defs = [copy.deepcopy(t) for t in trac.db_default.schema
                  if t.name in tables]
    for t in table_defs:
        t.columns.append(Column('product'))
        # Normalize the key to a tuple and append the product column;
        # any other key type is unexpected and aborts the upgrade.
        if isinstance(t.key, (list, tuple)):
            t.key = tuple(t.key) + ('product',)
        else:
            raise TracError(
                "Invalid table '%s' schema key '%s' while upgrading "
                "plugin '%s' from version %d to %d'" %
                (t.name, t.key, PLUGIN_NAME, current_version, 3))
    return table_defs
def _get_table_columns(self, table_defs, all_columns=False):
    """Map each table name to its list of column names.

    :param table_defs: iterable of table schema objects
    :param all_columns: when False (the default) the 'product' column is
                        excluded from each list
    :return: dict mapping table name -> list of column names
    """
    table_columns = dict()
    for table in table_defs:
        # `table` already is the definition we need; the previous code
        # redundantly re-scanned `table_defs` for the same name (O(n^2)).
        column_names = [column.name for column in table.columns]
        table_columns[table.name] = \
            [c for c in column_names if all_columns or c != 'product']
    return table_columns
def _insert_default_product(self, db):
    """Insert the default product row that hosts pre-existing data."""
    self.log.info("Creating default product")
    # Bind parameters instead of interpolating into the SQL text so a
    # prefix containing quotes cannot break the statement.
    db("""INSERT INTO bloodhound_product (prefix, name, description, owner)
          VALUES (%s, %s, %s, '')
          """, (self.default_product_prefix, 'Default', 'Default product'))
    def _upgrade_tickets(self, db, TICKET_TABLES, create_temp_table):
        """Assign product-less tickets to the default product and migrate
        all ticket-related tables to the product-aware schema.

        :param db: database transaction callable.
        :param TICKET_TABLES: names of ticket-related tables to migrate.
        :param create_temp_table: callable(table) -> (temp_name, cols).
        """
        # migrate tickets that don't have product assigned to default product
        # - update ticket table product column
        # - update ticket related tables by:
        #   - upgrading schema
        #   - update product column to match ticket's product
        self.log.info("Migrating tickets w/o product to default product")
        db("""UPDATE ticket SET product='%s'
        WHERE (product IS NULL OR product='')
        """ % self.default_product_prefix)
        # Move the ticket attachment files on disk; attachment.id is text
        # so the ticket id is cast for the join.
        self._migrate_attachments(
            db("""SELECT a.type, a.id, a.filename
                  FROM attachment a
                  INNER JOIN ticket t ON a.id = %(t.id)s
                  WHERE a.type='ticket'
                  """ % {'t.id': db.cast('t.id', 'text')}),
            to_product=self.default_product_prefix
        )
        self.log.info("Migrating ticket tables to a new schema")
        for table in TICKET_TABLES:
            temp_table_name, cols = create_temp_table(table)
            db("""INSERT INTO %s (%s, product)
                  SELECT %s, '' FROM %s""" %
               (table, cols, cols, temp_table_name))
            self._drop_temp_table(db, temp_table_name)
            if table == 'attachment':
                # attachment keys on the ticket id cast to text, so the
                # product is resolved through a correlated subquery.
                db("""UPDATE attachment
                        SET product=(SELECT ticket.product
                                       FROM ticket
                                      WHERE %(ticket.id)s=attachment.id
                                      LIMIT 1)
                      WHERE attachment.type='ticket'
                        AND EXISTS(SELECT ticket.product
                                     FROM ticket
                                    WHERE %(ticket.id)s=attachment.id)
                   """ % {'ticket.id': db.cast('ticket.id', 'text')})
            else:
                # Other ticket tables reference the ticket id directly.
                db("""UPDATE %(table)s
                        SET product=(SELECT ticket.product
                                       FROM ticket
                                      WHERE ticket.id=%(table)s.ticket)
                   """ % {'table': table})
    def _upgrade_system_tables(self, db, create_temp_table):
        """Migrate the system tables (except ``wiki``) to the
        product-aware schema, duplicating global rows per product.

        :param db: database transaction callable.
        :param create_temp_table: callable(table) -> (temp_name, cols).
        """
        # migrate system table (except wiki which is handled separately)
        # to a new schema
        # - create tables with the new schema
        # - populate system tables with global configuration for each product
        # - exception is permission table where permissions
        #   are also populated in global scope
        #
        # permission table specifics: 'anonymous' and 'authenticated' users
        # should by default have a PRODUCT_VIEW permission for all products
        self.log.info("Migrating system tables to a new schema")
        for table in self.MIGRATE_TABLES:
            if table == 'wiki':
                continue
            temp_table_name, cols = create_temp_table(table)
            for product in Product.select(self.env):
                self.log.info("Populating table '%s' for product '%s' ('%s')",
                              table, product.name, product.prefix)
                db("""INSERT INTO %s (%s, product) SELECT %s,'%s' FROM %s""" %
                   (table, cols, cols, product.prefix, temp_table_name))
                if table == 'permission':
                    db.executemany(
                        """INSERT INTO permission (username, action, product)
                           VALUES (%s, %s, %s)""",
                        [('anonymous', 'PRODUCT_VIEW', product.prefix),
                         ('authenticated', 'PRODUCT_VIEW', product.prefix)])
            if table == 'permission':
                # Permissions are additionally kept in the global scope
                # (empty product prefix).
                self.log.info("Populating table '%s' for global scope", table)
                db("""INSERT INTO %s (%s, product) SELECT %s,'%s' FROM %s""" %
                   (table, cols, cols, '', temp_table_name))
            self._drop_temp_table(db, temp_table_name)
        db.executemany(
            """INSERT INTO permission (username, action, product)
               VALUES (%s, %s, %s)""",
            [('anonymous', 'PRODUCT_VIEW', ''),
             ('authenticated', 'PRODUCT_VIEW', '')])
    def _upgrade_wikis(self, db, create_temp_table):
        """Move all wiki pages and their attachments (rows and files on
        disk) into the default product.

        :param db: database transaction callable.
        :param create_temp_table: callable(table) -> (temp_name, cols).
        """
        # migrate wiki table
        # - populate system wikis to all products + global scope
        # - update wiki attachment product to match wiki product
        table = 'wiki'
        temp_table_name, cols = create_temp_table(table)
        self.log.info("Migrating wikis to default product")
        db("""INSERT INTO %(table)s (%(cols)s, product)
              SELECT %(cols)s, '%(default_product)s' FROM %(temp_table)s
           """ % dict(table=table,
                      temp_table=temp_table_name,
                      cols=cols,
                      default_product=self.default_product_prefix,))
        # All wiki attachments follow their pages into the default product.
        db("""UPDATE attachment
              SET product='%s'
              WHERE attachment.type='wiki'
           """ % self.default_product_prefix)
        self._migrate_attachments(
            db("""SELECT type, id, filename
                  FROM attachment
                  WHERE type='wiki'
                  AND product='%s'
               """ % (self.default_product_prefix)),
            to_product=self.default_product_prefix,
        )
        self._drop_temp_table(db, temp_table_name)
def _migrate_attachments(self, attachments, to_product=None, copy=False):
for type, id, filename in attachments:
old_path = Attachment._get_path(self.env.path, type, id, filename)
new_path = self.env.path
if to_product:
new_path = os.path.join(new_path, 'products', to_product)
new_path = Attachment._get_path(new_path, type, id, filename)
dirname = os.path.dirname(new_path)
if not os.path.exists(old_path):
self.log.warning(
"Missing attachment files for %s:%s/%s",
type, id, filename)
continue
if os.path.exists(new_path):
# TODO: Do we want to overwrite?
continue
try:
if not os.path.exists(dirname):
os.makedirs(dirname)
if copy:
if hasattr(os, 'link'):
# TODO: It this safe?
os.link(old_path, new_path)
else:
shutil.copy(old_path, new_path)
else:
os.rename(old_path, new_path)
except OSError as err:
self.log.warning(
"Could not move attachment %s from %s %s to"
"product @ (%s)",
filename, type, id, str(err)
)
def _soft_link_repositories_to_default_product(self, db):
# soft link existing repositories to default product
repositories_linked = []
for id, name in db("""SELECT id, value FROM repository
WHERE name='name'"""):
if id in repositories_linked:
continue
db("""INSERT INTO repository (id, name, value)
VALUES (%s, 'product', '%s')""" %
(id, self.default_product_prefix))
repositories_linked.append(id)
self.log.info("Repository '%s' (%s) soft linked to default product",
name, id)
    def _upgrade_table_system(self, SYSTEM_TABLES, create_temp_table, db):
        """Migrate ``SYSTEM_TABLES`` to the product-aware schema, keeping
        all existing rows in the global scope (empty product prefix).

        :param SYSTEM_TABLES: names of the tables to migrate.
        :param create_temp_table: callable(table) -> (temp_name, cols).
        :param db: database transaction callable.
        """
        # Update system tables
        # Upgrade schema
        self.log.info("Migrating system tables to a new schema")
        for table in SYSTEM_TABLES:
            temp_table_name, cols = create_temp_table(table)
            db("""INSERT INTO %s (%s, product)
                  SELECT %s,'' FROM %s""" %
               (table, cols, cols, temp_table_name))
            self._drop_temp_table(db, temp_table_name)
def _enable_multiproduct_hooks(self):
# enable multi product hooks in environment configuration
config_update = False
if not 'environment_factory' in self.env.config['trac']:
self.env.config['trac'].set('environment_factory',
'multiproduct.hooks.MultiProductEnvironmentFactory')
config_update = True
if not 'request_factory' in self.env.config['trac']:
self.env.config['trac'].set('request_factory',
'multiproduct.hooks.ProductRequestFactory')
config_update = True
if config_update:
self.log.info(
"Enabling multi product hooks in environment configuration")
self.env.config.save()
    def _create_product_tables_for_plugins(self, db):
        """Create the product configuration table(s) from the
        ``ProductSetting`` schema."""
        self.log.debug("creating additional db tables for %s plugin." %
                       PLUGIN_NAME)
        db_connector, dummy = DatabaseManager(self.env)._get_connector()
        for statement in db_connector.to_sql(ProductSetting._get_schema()):
            db(statement)
    def _modify_ticket_pk(self, db, table_defs):
        """Rename the ticket primary key 'id' to 'uid' and add a new,
        per-product 'id' column with a unique (product, id) index.

        NOTE(review): the INSERT below targets 'ticket' explicitly, so
        ``table_defs`` is expected to contain only the ticket table —
        confirm at the call site.
        """
        self.log.debug("Modifying ticket primary key: id -> uid")
        table_columns = self._get_table_columns(table_defs, True)
        db_connector, _ = DatabaseManager(self.env)._get_connector()
        # In-place schema tweak: 'id' becomes the environment-unique 'uid'.
        def rename_id_to_uid(table):
            for c in table.columns:
                if c.name == 'id':
                    c.name = 'uid'
                    break
            table.key = ['uid']
        # New per-product 'id' column, unique within a product.
        def add_new_id_column(table):
            id_column = Column('id', type='int', auto_increment=True)
            if using_sqlite_backend(self.env) or using_mysql_backend(self.env):
                # sqlite and mysql don't support multiple auto increment columns
                id_column.auto_increment = False
            table.columns.append(id_column)
            table.indices.append(Index(['product', 'id'], unique=True))
        for t in table_defs:
            rename_id_to_uid(t)
            add_new_id_column(t)
            temp_table_name, cols = self._create_temp_table(
                db, t.name, table_columns, table_defs)
            db("""INSERT INTO ticket (%s, uid)
                  SELECT %s, id FROM ticket_temp""" %
               (cols, cols))
            self._drop_temp_table(db, temp_table_name)
        # Re-sync both sequences after the bulk copy.
        db.update_sequence(db.cursor(), 'ticket', 'id')
        db.update_sequence(db.cursor(), 'ticket', 'uid')
    # IResourceChangeListener methods
    def match_resource(self, resource):
        """Only Product resources are handled by this listener."""
        return isinstance(resource, Product)
    def resource_created(self, resource, context):
        """Populate a freshly created product with default table data and
        import the default wiki pages into its environment."""
        import trac.db_default
        from multiproduct.env import EnvironmentStub
        # Don't populate product database when running from within test
        # environment stub as test cases really don't expect that ...
        if isinstance(self.env, EnvironmentStub):
            return
        product = resource
        self.log.debug("Adding product info (%s) to tables:" % product.prefix)
        with self.env.db_direct_transaction as db:
            # create the default entries for this Product from defaults
            for table in trac.db_default.get_data(db):
                if not table[0] in self.PRODUCT_POPULATE_TABLES:
                    continue
                self.log.debug("  -> %s" % table[0])
                # Each default row gets the new product's prefix appended.
                cols = table[1] + ('product', )
                rows = [p + (product.prefix, ) for p in table[2]]
                db.executemany(
                    "INSERT INTO %s (%s) VALUES (%s)" %
                    (table[0], ','.join(cols), ','.join(['%s' for c in cols])),
                    rows)
        # Import default pages in product wiki
        wikiadmin = WikiAdmin(ProductEnvironment(self.env, product.prefix))
        pages = ('TitleIndex', 'RecentChanges', 'InterTrac', 'InterWiki')
        for page in pages:
            filename = resource_filename('trac.wiki', 'default-pages/' + page)
            wikiadmin.import_page(filename, page)
    def resource_changed(self, resource, old_values, context):
        # No action on product change.
        return
    def resource_deleted(self, resource, context):
        # No action on product deletion.
        return
    def resource_version_deleted(self, resource, context):
        # No action on product version deletion.
        return
    # ITemplateProvider methods
    def get_templates_dirs(self):
        """Return the plugin's template directories."""
        return [resource_filename(__name__, 'templates')]
    def get_htdocs_dirs(self):
        """Return the plugin's static resource directories (none)."""
        return []
# IPermissionRequestor methods
def get_permission_actions(self):
acts = ['PRODUCT_CREATE', 'PRODUCT_DELETE', 'PRODUCT_MODIFY',
'PRODUCT_VIEW']
if not isinstance(self.env, ProductEnvironment):
return acts + [('PRODUCT_ADMIN', acts)] + [('ROADMAP_ADMIN', acts)]
else:
# In product context PRODUCT_ADMIN will be provided by product env
# to ensure it will always be handy
return acts
    # ITicketFieldProvider methods
    def get_select_fields(self):
        """Product select fields

        Contributes the 'product' select field (backed by the Product
        model, keyed by prefix) at weight 35, defaulting to
        ``self.default_product``.
        """
        return [(35, {'name': 'product', 'label': _('Product'),
                      'cls': Product, 'pk': 'prefix', 'optional': False,
                      'value': self.default_product})]
def get_radio_fields(self):
"""Product radio fields"""
return []
# IResourceManager methods
def get_resource_realms(self):
"""Manage 'product' realm.
"""
yield 'product'
def get_resource_description(self, resource, format='default', context=None,
**kwargs):
"""Describe product resource.
"""
desc = resource.id
if format != 'compact':
desc = _('Product %(name)s', name=resource.id)
if context:
return self._render_link(context, resource.id, desc)
else:
return desc
def resource_exists(self, resource):
"""Check whether product exists physically.
"""
products = Product.select(self.env, where={'name' : resource.id})
return bool(products)
# IExternalResourceConnector methods
def get_supported_neighborhoods(self):
"""Neighborhoods for `product` and `global` environments.
"""
yield 'product'
yield 'global'
    def load_manager(self, neighborhood):
        """Load global environment or product environment given its prefix

        :param neighborhood: object exposing ``_realm`` ('global' or
            'product') and ``_id`` (the product prefix).
        :raises ResourceNotFound: for unsupported realms or unknown
            product prefixes.
        """
        if neighborhood._realm == 'global':
            # FIXME: ResourceNotFound if neighborhood ID != None ?
            prefix = GLOBAL_PRODUCT
        elif neighborhood._realm == 'product':
            prefix = neighborhood._id
        else:
            raise ResourceNotFound(_(u'Unsupported neighborhood %(realm)s',
                                     realm=neighborhood._realm))
        try:
            return lookup_product_env(self.env, prefix)
        except LookupError:
            # Translate the lookup failure into Trac's resource error.
            raise ResourceNotFound(_(u'Unknown product prefix %(prefix)s',
                                     prefix=prefix))
    def manager_exists(self, neighborhood):
        """Check whether the target environment exists physically.

        NOTE(review): realms other than 'global'/'product' fall off the
        end and implicitly return None (falsy) — confirm that is intended.
        """
        if neighborhood._realm == 'global':
            # Global environment
            return isinstance(self.env, (Environment, ProductEnvironment))
        elif neighborhood._realm == 'product':
            prefix = neighborhood._id
            if not prefix:
                # Global environment
                return True
            # Existence is checked against the global product registry.
            return Product(lookup_product_env(self.env, GLOBAL_PRODUCT),
                           {'prefix' : prefix})._exists
    # IWikiSyntaxProvider methods
    # Delimiter for product shorthand links, e.g. PREFIX->wiki:Page.
    short_syntax_delimiter = u'->'
    def get_wiki_syntax(self):
        """Yield wiki-syntax rules for cross-product links.

        First rule: ``pid->target`` links into another product's
        resources; second rule (only when ProductTicketModule is enabled):
        ``pid-123`` shorthand for a ticket in another product.
        """
        yield (r'(?<!\S)!?(?P<pid>%s)%s(?P<ptarget>%s:(?:%s)|%s|%s(?:%s*%s)?)' %
               (IDENTIFIER,
                PRODUCT_SYNTAX_DELIMITER_RE,
                WikiParser.LINK_SCHEME, WikiParser.QUOTED_STRING,
                WikiParser.QUOTED_STRING, WikiParser.SHREF_TARGET_FIRST,
                WikiParser.SHREF_TARGET_MIDDLE, WikiParser.SHREF_TARGET_LAST),
               lambda f, m, fm :
               self._format_link(f, 'product',
                                 '%s:%s' % (fm.group('pid'),
                                            unquote_label(fm.group('ptarget'))),
                                 fm.group(0), fm))
        if self.env[ProductTicketModule] is not None:
            yield (r"(?<!\S)!?(?P<jtp>%s)-(?P<jtt>\d+)(?P<jtf>[?#]\S+)?" %
                   (IDENTIFIER,),
                   lambda f, m, fm :
                   self._format_link(f, 'product',
                                     '%s:ticket:%s' %
                                     (fm.group('jtp'),
                                      fm.group('jtt') +
                                      (fm.group('jtf') or '')),
                                     m, fm))
def get_link_resolvers(self):
yield ('global', self._format_link)
yield ('product', self._format_link)
# ITicketManipulator methods
def validate_ticket(self, req, ticket):
# check whether the owner exists in db, add a warning if not
if req.args.get('action') == 'reassign' and \
ticket['owner'] != self.env.config.get('ticket', 'default_owner'):
owner = self.env.db_direct_query(
"SELECT sid FROM session WHERE sid=%s",
(ticket['owner'], ))
if not owner:
# Note: add_warning() is used intead of returning a list of
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | true |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/bloodhound_multiproduct/multiproduct/web_ui.py | bloodhound_multiproduct/multiproduct/web_ui.py | # -*- coding: UTF-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import re
from trac.core import Component, TracError, implements
from trac.resource import Neighborhood, Resource, ResourceNotFound
from trac.web.api import HTTPNotFound, IRequestHandler, IRequestFilter
from trac.web.chrome import (
Chrome, INavigationContributor, add_link, add_notice, add_warning,
prevnext_nav, web_context
)
from multiproduct.env import resolve_product_href, lookup_product_env
from multiproduct.hooks import PRODUCT_RE
from multiproduct.model import Product
from multiproduct.env import ProductEnvironment
from multiproduct.util.translation import _
# requests to the following URLs will be skipped in the global scope
# (no more redirection to default product)
IGNORED_REQUESTS_RE = \
re.compile(r'^/(?P<section>milestone|roadmap|search|'
r'(raw-|zip-)?attachment/(ticket|milestone))(?P<pathinfo>.*)')
class ProductModule(Component):
    """Base Product behaviour.

    Implements the ``/products`` request handler (list, view, create and
    edit products) plus a request filter that keeps selected global-scope
    requests from being redirected into a product.
    """
    implements(IRequestFilter, IRequestHandler)

    # IRequestFilter methods
    def pre_process_request(self, req, handler):
        """Drop handling for global-scope requests matched by
        IGNORED_REQUESTS_RE (no redirection to the default product)."""
        if not isinstance(self.env, ProductEnvironment) and \
                IGNORED_REQUESTS_RE.match(req.path_info):
            return None
        return handler

    def post_process_request(self, req, template, data, content_type):
        """Pass the response through unchanged.

        BUG FIX: ``self`` was missing from the signature, so Trac's
        bound-method invocation raised TypeError.
        """
        return template, data, content_type

    # IRequestHandler methods
    def match_request(self, req):
        """Match product paths captured by PRODUCT_RE and stash the
        product id and remaining path info in ``req.args``."""
        m = PRODUCT_RE.match(req.path_info)
        if m:
            req.args['productid'] = m.group('pid')
            req.args['pathinfo'] = m.group('pathinfo')
        # idiom fix: 'is not None' instead of 'not ... is None'
        return m is not None

    def process_request(self, req):
        """process request handler

        Renders the product list/view/edit pages and dispatches save
        events; raises TracError on product-removal attempts.
        """
        req.perm.require('PRODUCT_VIEW')
        pid = req.args.get('productid', None)
        if pid:
            req.perm('product', pid).require('PRODUCT_VIEW')
        try:
            product = Product(self.env, {'prefix': pid})
        except ResourceNotFound:
            product = Product(self.env)
        path_info = req.args.get('pathinfo')
        if path_info and path_info != '/':
            if not product._exists:
                # bh:ticket:561 - Display product list and warning message
                if pid:
                    add_warning(req, _("Product %(pid)s not found", pid=pid))
                    return self._render_list(req)
                else:
                    raise HTTPNotFound(
                        _('Unable to render product page. Wrong setup?'))
        if pid:
            add_link(req, 'up', req.href.products(), _('Products'))
        action = req.args.get('action', 'view')
        if req.method == 'POST':
            if 'cancel' in req.args:
                req.redirect(req.href.products(product.prefix))
            elif action == 'edit':
                return self._do_save(req, product)
            elif action == 'delete':
                raise TracError(_('Product removal is not allowed!'))
        elif action in ('new', 'edit'):
            return self._render_editor(req, product)
        elif action == 'delete':
            raise TracError(_('Product removal is not allowed!'))
        if not product._exists:
            if pid:
                # bh:ticket:561 - Display product list and warning message
                add_warning(req, _("Product %(pid)s not found", pid=pid))
            return self._render_list(req)
        data = {'product': product,
                'context': web_context(req, product.resource)}
        return 'product_view.html', data, None

    def _render_list(self, req):
        """products list

        Renders the list of products the user is allowed to view, with
        each product's href resolved against its own environment.
        """
        products = [p for p in Product.select(self.env)
                    if 'PRODUCT_VIEW' in req.perm(Neighborhood('product',
                                                               p.prefix))]
        # The original used map() purely for side effects; a plain loop
        # is clearer (and correct under Python 3's lazy map).
        for p in products:
            p.href = resolve_product_href(
                lookup_product_env(self.env, p.prefix), self.env)
        data = {'products': products,
                'context': web_context(req, Resource('product', None))}
        return 'product_list.html', data, None

    def _render_editor(self, req, product):
        """common processing for creating rendering the edit page

        Requires PRODUCT_MODIFY for existing products, PRODUCT_CREATE
        for new ones.
        """
        if product._exists:
            req.perm(product.resource).require('PRODUCT_MODIFY')
        else:
            req.perm(product.resource).require('PRODUCT_CREATE')
        chrome = Chrome(self.env)
        chrome.add_jquery_ui(req)
        chrome.add_wiki_toolbars(req)
        data = {'product': product,
                'context': web_context(req, product.resource)}
        return 'product_edit.html', data, None

    def _do_save(self, req, product):
        """common processing for product save events

        Validates the submitted fields; on failure re-renders the editor
        with warnings, on success persists and redirects.
        """
        req.perm.require('PRODUCT_VIEW')
        name = req.args.get('name')
        prefix = req.args.get('prefix')
        description = req.args.get('description', '')
        owner = req.args.get('owner') or req.authname
        keys = {'prefix': prefix}
        field_data = {'name': name,
                      'description': description,
                      'owner': owner,
                      }
        warnings = []
        def warn(msg):
            # Collect warnings so we can re-render the editor once.
            add_warning(req, msg)
            warnings.append(msg)
        if product._exists:
            if name != product.name and Product.select(self.env,
                                                       where={'name': name}):
                warn(_('A product with name "%(name)s" already exists, please '
                       'choose a different name.', name=name))
            elif not name:
                warn(_('You must provide a name for the product.'))
            else:
                req.perm.require('PRODUCT_MODIFY')
                product.update_field_dict(field_data)
                product.update(req.authname)
                add_notice(req, _('Your changes have been saved.'))
        else:
            req.perm.require('PRODUCT_CREATE')
            if not prefix:
                warn(_('You must provide a prefix for the product.'))
            elif Product.select(self.env, where={'prefix': prefix}):
                warn(_('Product "%(id)s" already exists, please choose another '
                       'prefix.', id=prefix))
            if not name:
                warn(_('You must provide a name for the product.'))
            elif Product.select(self.env, where={'name': name}):
                warn(_('A product with name "%(name)s" already exists, please '
                       'choose a different name.', name=name))
            if not warnings:
                prod = Product(self.env)
                prod.update_field_dict(keys)
                prod.update_field_dict(field_data)
                prod.insert()
                add_notice(req, _('The product "%(id)s" has been added.',
                                  id=prefix))
        if warnings:
            product.update_field_dict(keys)
            product.update_field_dict(field_data)
            return self._render_editor(req, product)
        req.redirect(req.href.products(prefix))

    # helper methods for INavigationContributor implementations
    @classmethod
    def get_product_path(cls, env, req, itempath):
        """Provide a navigation item path"""
        product = req.args.get('productid', '')
        if product and env.is_component_enabled(ProductModule):
            return req.href('products', product, itempath)
        return req.href(itempath)

    @classmethod
    def get_product_list(cls, env, req, href_fcn=None):
        """Returns a list of products as (prefix, name, url) tuples
        """
        if href_fcn is None:
            href_fcn = req.href.products
        product_list = []
        for product in Product.select(env):
            if 'PRODUCT_VIEW' in req.perm(Neighborhood('product',
                                                       product.prefix).
                                          child(product.resource)):
                product_list.append((product.prefix, product.name,
                                     href_fcn(product.prefix)))
        return product_list
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/bloodhound_multiproduct/multiproduct/model.py | bloodhound_multiproduct/multiproduct/model.py | # -*- coding: UTF-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from datetime import datetime
from itertools import izip
from trac.core import TracError
from trac.resource import Resource
from trac.ticket.model import Ticket
from trac.ticket.query import Query
from trac.util.datefmt import utc
from bhdashboard.model import ModelBase
# -------------------------------------------
# Product API
# -------------------------------------------
class Product(ModelBase):
    """The Product table"""
    # ModelBase metadata: products are keyed by immutable 'prefix',
    # with 'name' required to be unique.
    _meta = {'table_name': 'bloodhound_product',
             'object_name': 'Product',
             'key_fields': ['prefix'],
             'non_key_fields': ['name', 'description', 'owner'],
             'no_change_fields': ['prefix'],
             'unique_fields': ['name'],
             }
    @property
    def resource(self):
        """Allow Product to be treated as a Resource"""
        return Resource('product', self.prefix)
    def delete(self, resources_to=None):
        """ override the delete method so that we can move references to this
        object to a new product

        :param resources_to: prefix of an existing product to which mapped
            resources are reassigned; when None, mappings are left as-is.
        :raises TracError: if the target product does not exist.
        """
        if resources_to is not None:
            new_product = Product(self._env, resources_to)
            if not new_product._exists:
                sdata = {'new_table': resources_to}
                sdata.update(self._meta)
                raise TracError('%(object_name)s %(new_table)s does not exist' %
                                sdata)
        original_prefix = self._data['prefix']
        super(Product, self).delete()
        #find and update all resources that should move
        where = {'product_id': original_prefix}
        for prm in ProductResourceMap.select(self._env, where=where):
            prm._data['product_id'] = resources_to
            prm.update()
    def _update_relations(self, db=None, author=None):
        """Extra actions due to update

        When the product is renamed, a change comment is recorded on
        every ticket belonging to the product.
        """
        # tickets need to be updated
        old_name = self._old_data['name']
        new_name = self._data['name']
        now = datetime.now(utc)
        comment = 'Product %s renamed to %s' % (old_name, new_name)
        if old_name != new_name:
            env = self._env.parent or self._env
            if self._data['prefix']:
                from multiproduct.env import ProductEnvironment
                env = ProductEnvironment(env, self._data['prefix'])
            for t in Product.get_tickets(self._env, self._data['prefix']):
                ticket = Ticket(env, t['id'], db)
                ticket.save_changes(author, comment, now)
    @classmethod
    def get_tickets(cls, env, product=''):
        """Retrieve all tickets associated with the product."""
        from multiproduct.ticket.query import ProductQuery
        from multiproduct.env import ProductEnvironment
        # Default to the enclosing product when called from a product env.
        if not product and isinstance(env, ProductEnvironment):
            product = env.product.prefix
        q = ProductQuery.from_string(env, 'product=%s' % product)
        return q.execute()
class ProductResourceMap(ModelBase):
    """Table representing the mapping of resources to their product"""
    # ModelBase metadata: auto-incremented surrogate key 'id'.
    _meta = {'table_name': 'bloodhound_productresourcemap',
             'object_name': 'ProductResourceMapping',
             'key_fields': ['id'],
             'non_key_fields': ['product_id', 'resource_type', 'resource_id'],
             'no_change_fields': ['id'],
             'unique_fields': [],
             'auto_inc_fields': ['id'],
             }
    def reparent_resource(self, product=None):
        """a specific function to update a record when it is to move product

        :param product: prefix of the destination product; must exist.
        :raises TracError: if the destination product does not exist.
        """
        if product is not None:
            new_product = Product(self._env, product)
            if not new_product._exists:
                sdata = {'new_table': product}
                # BUG FIX: was ``self.meta`` (no such attribute), which
                # raised AttributeError instead of the intended TracError.
                sdata.update(self._meta)
                raise TracError('%(object_name)s %(new_table)s does not exist'
                                % sdata)
            self._data['product_id'] = product
            self.update()
# -------------------------------------------
# Configuration
# -------------------------------------------
class ProductSetting(ModelBase):
    """The Product configuration table
    """
    _meta = {'table_name': 'bloodhound_productconfig',
             'object_name': 'ProductSetting',
             'key_fields': ['product', 'section', 'option'],
             'non_key_fields': ['value', ],
             'no_change_fields': ['product', 'section', 'option'],
             'unique_fields': [],
             }
    @classmethod
    def exists(cls, env, product, section=None, option=None, db=None):
        """Determine whether there are configuration values for
        product, section, option .

        :param product: product prefix (required).
        :param section: optional section filter.
        :param option: optional option filter.
        :raises ValueError: when ``product`` is None.
        """
        if product is None:
            raise ValueError("Product prefix required")
        # Build the WHERE clause explicitly; the original derived it from
        # locals(), which was fragile and obscured the intent.
        where = {'product': product}
        if section is not None:
            where['section'] = section
        if option is not None:
            where['option'] = option
        return len(cls.select(env, db, where=where, limit=1)) > 0
    @classmethod
    def get_sections(cls, env, product):
        """Retrieve configuration sections defined for a product
        """
        # FIXME: Maybe something more ORM-ish should be added in ModelBase
        return [row[0] for row in env.db_query("""SELECT DISTINCT section
            FROM bloodhound_productconfig WHERE product = %s""",
            (product,))]
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/bloodhound_multiproduct/multiproduct/core.py | bloodhound_multiproduct/multiproduct/core.py | # -*- coding: UTF-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from trac.core import ComponentMeta, ExtensionPoint
class MultiProductExtensionPoint(ExtensionPoint):
    """Marker class for multiproduct extension points in components."""
    def extensions(self, component):
        """Return a multiproduct aware list of components that declare to
        implement the extension point interface.
        When accessed in product environment, only components for that
        environment are returned.
        When accessed in global environment, a separate instance will be
        returned for global and each of the product environments.
        """
        compmgr = component.compmgr
        # Product environments carry a non-None 'parent'; for those (or
        # any manager lacking the attribute) fall back to stock behaviour.
        if not hasattr(compmgr, 'parent') or compmgr.parent is not None:
            return \
                super(MultiProductExtensionPoint, self).extensions(component)
        classes = ComponentMeta._registry.get(self.interface, ())
        components = [component.compmgr[cls] for cls in classes]
        # Global environment: additionally collect one instance per
        # product environment.
        components += [
            env[cls]
            for cls in classes
            for env in component.compmgr.all_product_envs()
        ]
        # env[cls] yields None for disabled components; drop them.
        return [c for c in components if c]
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/bloodhound_multiproduct/multiproduct/product_admin.py | bloodhound_multiproduct/multiproduct/product_admin.py | # -*- coding: UTF-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from trac.admin.api import IAdminCommandProvider, AdminCommandError,\
AdminCommandManager
from trac.admin.console import TracAdmin, TRAC_VERSION
from trac.admin.web_ui import AdminModule
from trac.core import *
from trac.config import *
from trac.perm import PermissionSystem
from trac.resource import ResourceNotFound
from trac.ticket.admin import TicketAdminPanel, _save_config
from trac.util import getuser, lazy
from trac.util.text import print_table, to_unicode, printerr, printout
from trac.web.api import HTTPNotFound, IRequestFilter, IRequestHandler
from trac.web.chrome import Chrome, add_notice, add_warning
from multiproduct.env import ProductEnvironment
from multiproduct.model import Product
from multiproduct.perm import sudo
from multiproduct.util import ReplacementComponent
from multiproduct.util.translation import _, N_, gettext, ngettext
import multiproduct.versioncontrol
import trac.versioncontrol.admin
from trac.versioncontrol import DbRepositoryProvider, RepositoryManager
#--------------------------
# Product admin panel
#--------------------------
class ProductAdminPanel(TicketAdminPanel):
"""The Product Admin Panel"""
_type = 'products'
_label = ('Product','Products')
def get_admin_commands(self):
if not isinstance(self.env, ProductEnvironment):
yield ('product add', '<prefix> <owner> <name>',
'Add a new product',
None, self._do_product_add)
yield ('product chown', '<prefix> <owner>',
'Change product ownership',
self._complete_product, self._do_product_chown)
yield ('product list', '',
'Show available products',
None, self._do_product_list)
yield ('product remove', '<prefix>',
'Remove/uninstall a product',
self._complete_product, self._do_product_remove)
yield ('product rename', '<prefix> <newname>',
'Rename a product',
self._complete_product, self._do_product_rename)
def get_admin_panels(self, req):
if isinstance(req.perm.env, ProductEnvironment):
return None
return super(ProductAdminPanel, self).get_admin_panels(req)
def _render_admin_panel(self, req, cat, page, product):
req.perm.require('PRODUCT_VIEW')
name = req.args.get('name')
description = req.args.get('description','')
prefix = req.args.get('prefix') if product is None else product
owner = req.args.get('owner')
keys = {'prefix':prefix}
field_data = {'name':name,
'description':description,
'owner':owner,
}
data = {}
# Detail view?
if product:
prod = Product(self.env, keys)
if req.method == 'POST':
if req.args.get('save'):
req.perm.require('PRODUCT_MODIFY')
prod.update_field_dict(field_data)
prod.update(req.authname)
add_notice(req, _('Your changes have been saved.'))
req.redirect(req.href.admin(cat, page))
elif req.args.get('cancel'):
req.redirect(req.href.admin(cat, page))
Chrome(self.env).add_wiki_toolbars(req)
data = {'view': 'detail', 'product': prod}
else:
default = self.config.get('ticket', 'default_product')
if req.method == 'POST':
# Add Product
if req.args.get('add'):
req.perm.require('PRODUCT_CREATE')
if not (prefix and name and owner):
if not prefix:
add_warning(req, _("You must provide a prefix "
"for the product."))
if not name:
add_warning(req, _("You must provide a name "
"for the product."))
if not owner:
add_warning(req, _("You must provide an owner "
"for the product."))
data['prefix'] = prefix
data['name'] = name
data['owner'] = owner
else:
try:
prod = Product(self.env, keys)
except ResourceNotFound:
prod = Product(self.env)
prod.update_field_dict(keys)
prod.update_field_dict(field_data)
prod.insert()
add_notice(req, _('The product "%(id)s" has been '
'added.', id=prefix))
req.redirect(req.href.admin(cat, page))
else:
if prod.prefix is None:
raise TracError(_('Invalid product id.'))
raise TracError(_("Product %(id)s already exists.",
id=prefix))
# Remove product
elif req.args.get('remove'):
raise TracError(_('Product removal is not allowed!'))
# Set default product
elif req.args.get('apply'):
prefix = req.args.get('default')
if prefix and prefix != default:
self.log.info("Setting default product to %s",
prefix)
self.config.set('ticket', 'default_product',
prefix)
_save_config(self.config, req, self.log)
req.redirect(req.href.admin(cat, page))
# Clear default product
elif req.args.get('clear'):
self.log.info("Clearing default product")
self.config.set('ticket', 'default_product', '')
_save_config(self.config, req, self.log)
req.redirect(req.href.admin(cat, page))
data['view'] = 'list'
data['products'] = Product.select(self.env)
data['default'] = default
if self.config.getbool('ticket', 'restrict_owner'):
perm = PermissionSystem(self.env)
def valid_owner(username):
return perm.get_user_permissions(username).get('TICKET_MODIFY')
data['owners'] = [username for username, name, email
in self.env.get_known_users()
if valid_owner(username)]
data['owners'].insert(0, '')
data['owners'].sort()
else:
data['owners'] = None
return 'admin_products.html', data
def load_product(self, prefix):
products = Product.select(self.env, where={'prefix' : prefix})
if not products:
raise AdminCommandError('Unknown product %s' % (prefix,))
return products[0]
def _complete_product(self, args):
if len(args) == 1:
return get_products(self.env)
    def _do_product_add(self, prefix, owner, name):
        """trac-admin 'product add': create a new product record."""
        product = Product(self.env)
        product._data.update({'prefix':prefix, 'name':name, 'owner':owner})
        try:
            product.insert()
        except TracError, exc:
            # Surface model errors as console errors with a readable message
            raise AdminCommandError(to_unicode(exc))
def _do_product_chown(self, prefix, owner):
product = self.load_product(prefix)
product._data['owner'] = owner
product.update(getuser())
def _do_product_list(self):
if not isinstance(self.env, ProductEnvironment):
print_table([(p.prefix, p.owner, p.name)
for p in Product.select(self.env)],
[_('Prefix'), _('Owner'), _('Name')])
    def _do_product_remove(self, prefix):
        """trac-admin 'product remove': deliberately unsupported."""
        raise AdminCommandError(_("Command 'product remove' not supported yet"))
def _do_product_rename(self, prefix, newname):
product = self.load_product(prefix)
product._data['name'] = newname
product.update(getuser())
#--------------------------
# Advanced administration in product context
#--------------------------
class IProductAdminAclContributor(Interface):
    """Interface implemented by components contributing with entries to the
    access control white list in order to enable admin panels in product
    context.

    **Notice** that deny entries configured by users in the blacklist
    (i.e. using TracIni `admin_blacklist` option in `multiproduct` section)
    will override these entries.
    """
    def enable_product_admin_panels():
        """Return a sequence of `(cat_id, panel_id)` tuples that will be
        enabled in product context unless specified otherwise in configuration.

        If `panel_id` is set to `'*'` then all panels in section `cat_id`
        will have green light.
        """
class ProductAdminModule(Component):
    """Leverage administration panels in product context based on the
    combination of white list and black list.
    """
    implements(IAdminCommandProvider, IRequestFilter, IRequestHandler)

    # Plugins declaring which admin panels are safe in product context
    acl_contributors = ExtensionPoint(IProductAdminAclContributor)

    raw_blacklist = ListOption('multiproduct', 'admin_blacklist',
        doc="""Do not show any product admin panels in this list even if
        allowed by white list. Value must be a comma-separated list of
        `cat:id` strings respectively identifying the section and identifier
        of target admin panel. Empty values of `cat` and `id` will be ignored
        and warnings emitted if TracLogging is enabled. If `id` is set
        to `*` then all panels in `cat` section will be added to blacklist
        while in product context.""")

    @lazy
    def acl(self):
        """Access control table based on blacklist and white list.

        Keys are either `cat_id` (whole section, from `'*'` entries) or
        `(cat_id, panel_id)` tuples; values are True (allow) / False (deny).
        Computed once per component lifetime (`@lazy`).
        """
        # FIXME : Use an immutable (mapping?) type
        acl = {}
        if isinstance(self.env, ProductEnvironment):
            for acl_c in self.acl_contributors:
                for cat_id, panel_id in acl_c.enable_product_admin_panels():
                    if cat_id and panel_id:
                        if panel_id == '*':
                            # Whole-section entry keyed by category alone
                            acl[cat_id] = True
                        else:
                            acl[(cat_id, panel_id)] = True
                    else:
                        self.log.warning('Invalid panel %s in white list',
                                         panel_id)

            # Blacklist entries will override those in white list
            warnings = []
            for panelref in self.raw_blacklist:
                try:
                    cat_id, panel_id = panelref.split(':')
                except ValueError:
                    cat_id = panel_id = ''
                if cat_id and panel_id:
                    if panel_id == '*':
                        acl[cat_id] = False
                    else:
                        acl[(cat_id, panel_id)] = False
                else:
                    warnings.append(panelref)
            if warnings:
                self.log.warning("Invalid panel descriptors '%s' in blacklist",
                                 ','.join(warnings))
        return acl

    # IAdminCommandProvider methods
    def get_admin_commands(self):
        # Only register the proxy command from the global environment
        if not isinstance(self.env, ProductEnvironment):
            yield ('product admin', '<PREFIX> <admin command>',
                   'Execute admin (sub-)command upon product resources',
                   self._complete_product_admin, self._do_product_admin)

    def product_admincmd_mgr(self, prefix):
        """Return an `AdminCommandManager` bound to product `prefix`.

        :raises AdminCommandError: when the prefix resolves to no product.
        """
        try:
            product_env = ProductEnvironment.lookup_env(self.env, prefix)
        except LookupError:
            raise AdminCommandError('Unknown product %s' % (prefix,))
        else:
            return AdminCommandManager(product_env)

    def _complete_product_admin(self, args):
        # First arg completes to a product prefix, the rest is delegated
        # to the product's own command manager.
        if len(args) == 1:
            return get_products(self.env)
        else:
            mgr = self.product_admincmd_mgr(args[0])
            return mgr.complete_command(args[1:])

    # Commands that only make sense for the global environment
    GLOBAL_COMMANDS = ('deploy', 'hotcopy', 'initenv', 'upgrade')

    def _do_product_admin(self, prefix, *args):
        """Dispatch a trac-admin (sub-)command inside product `prefix`,
        with special handling for 'help' and for globally-scoped commands.
        """
        mgr = self.product_admincmd_mgr(prefix)
        if args and args[0] in self.GLOBAL_COMMANDS:
            raise AdminCommandError('%s command not supported for products' %
                                    (args[0],))
        if args and args[0] == 'help':
            help_args = args[1:]
            if help_args:
                doc = mgr.get_command_help(list(help_args))
                if doc:
                    TracAdmin.print_doc(doc)
                else:
                    printerr(_("No documentation found for '%(cmd)s'."
                               " Use 'help' to see the list of commands.",
                               cmd=' '.join(help_args)))
                    # Offer close matches, mirroring trac-admin behavior
                    cmds = mgr.get_similar_commands(help_args[0])
                    if cmds:
                        printout('')
                        printout(ngettext("Did you mean this?",
                                          "Did you mean one of these?",
                                          len(cmds)))
                        for cmd in cmds:
                            printout(' ' + cmd)
            else:
                printout(_("trac-admin - The Trac Administration Console "
                           "%(version)s", version=TRAC_VERSION))
                env = mgr.env
                TracAdmin.print_doc(TracAdmin.all_docs(env), short=True)
        else:
            try:
                mgr.execute_command(*args)
            except AdminCommandError, e:
                printerr(_("Error: %(msg)s", msg=to_unicode(e)))
                if e.show_usage:
                    print
                    # Show contextual help for the failing (sub-)command
                    self._do_product_admin(prefix, 'help', *args[:2])
            except:
                raise

    # IRequestFilter methods
    def pre_process_request(self, req, handler):
        """Intercept admin requests in product context if `TRAC_ADMIN`
        expectations are not met.
        """
        if isinstance(self.env, ProductEnvironment) and \
                handler is AdminModule(self.env) and \
                not req.perm.has_permission('TRAC_ADMIN') and \
                req.perm.has_permission('PRODUCT_ADMIN'):
            # Intercept admin request
            return self
        return handler

    def post_process_request(self, req, template, data, content_type):
        # Pass-through: this filter only intercepts in pre_process_request
        return template, data, content_type

    # IRequestHandler methods
    def match_request(self, req):
        """Never match a request"""
        # Only reachable via pre_process_request interception above

    def process_request(self, req):
        """Anticipate permission error to hijack admin panel dispatching
        process in product context if `TRAC_ADMIN` expectations are not met.
        """
        # TODO: Verify `isinstance(self.env, ProductEnvironment)` once again ?
        cat_id = req.args.get('cat_id')
        panel_id = req.args.get('panel_id')
        if self._check_panel(cat_id, panel_id):
            # Elevate privileges only for whitelisted panels
            with sudo(req):
                return self.global_process_request(req)
        else:
            raise HTTPNotFound(_('Unknown administration panel'))

    # Reuse the stock AdminModule dispatching logic (Py2 unbound function)
    global_process_request = AdminModule.process_request.im_func

    # Internal methods
    def _get_panels(self, req):
        # Delegate to stock AdminModule, then filter panels through the ACL
        if isinstance(self.env, ProductEnvironment):
            panels, providers = AdminModule(self.env)._get_panels(req)
            # Filter based on ACLs
            panels = [p for p in panels if self._check_panel(p[0], p[2])]
            # providers = dict([k, p] for k, p in providers.iteritems()
            #                  if self._check_panel(*k))
            return panels, providers
        else:
            return [], []

    def _check_panel(self, cat_id, panel_id):
        """Return True if panel `(cat_id, panel_id)` may be shown in
        product context: no explicit deny, at least one allow entry, and
        never the plugin admin panel.
        """
        cat_allow = self.acl.get(cat_id)
        panel_allow = self.acl.get((cat_id, panel_id))
        return cat_allow is not False and panel_allow is not False \
               and (cat_allow, panel_allow) != (None, None) \
               and (cat_id, panel_id) != ('general', 'plugin') # double-check !
def get_products(env):
    """Return the list of product prefixes known to `env`."""
    return list(p.prefix for p in Product.select(env))
class DefaultProductAdminWhitelist(Component):
    """Default white list of admin panels enabled in product context."""
    implements(IProductAdminAclContributor)

    # (category, panel) pairs allowed by default;
    # ('ticket', '*') whitelists the whole ticket section.
    _DEFAULT_PANELS = (
        ('general', 'basics'),
        ('general', 'perm'),
        ('accounts', 'notification'),
        # FIXME: Include users admin panel ?
        #('accounts', 'users'),
        ('ticket', '*'),
        ('versioncontrol', 'repository'),
    )

    # IProductAdminAclContributor methods
    def enable_product_admin_panels(self):
        for entry in self._DEFAULT_PANELS:
            yield entry
class ProductRepositoryAdminPanel(ReplacementComponent, trac.versioncontrol.admin.RepositoryAdminPanel):
    """Web admin panel for repository administration, product-aware.

    In the global environment it behaves exactly like the stock panel; in
    product context it manages soft links between the product and the
    globally defined repositories instead.
    """
    implements(trac.admin.IAdminPanelProvider)

    # IAdminPanelProvider methods
    def get_admin_panels(self, req):
        if 'VERSIONCONTROL_ADMIN' in req.perm:
            # Label differs to reflect link-management vs full management
            yield ('versioncontrol', _('Version Control'), 'repository',
                   _('Repository Links') if isinstance(self.env, ProductEnvironment)
                   else _('Repositories'))

    def render_admin_panel(self, req, category, page, path_info):
        """Render the panel; delegates to the stock implementation outside
        product context, otherwise handles (un)linking repositories."""
        if not isinstance(self.env, ProductEnvironment):
            return super(ProductRepositoryAdminPanel, self).render_admin_panel(
                req, category, page, path_info)

        req.perm.require('VERSIONCONTROL_ADMIN')
        db_provider = self.env[DbRepositoryProvider]
        if req.method == 'POST' and db_provider:
            if req.args.get('remove'):
                repolist = req.args.get('sel')
                if repolist:
                    # 'sel' is a single string when only one box is checked
                    if isinstance(repolist, basestring):
                        repolist = [repolist, ]
                    for reponame in repolist:
                        db_provider.unlink_product(reponame)
            elif req.args.get('addlink') is not None and db_provider:
                reponame = req.args.get('repository')
                db_provider.link_product(reponame)
            req.redirect(req.href.admin(category, page))

        # Retrieve info for all product repositories
        rm_product = RepositoryManager(self.env)
        rm_product.reload_repositories()
        all_product_repos = rm_product.get_all_repositories()
        repositories = dict((reponame, self._extend_info(
                                reponame, info.copy(), True))
                            for (reponame, info) in
                            all_product_repos.iteritems())
        types = sorted([''] + rm_product.get_supported_types())

        # Construct a list of all repositories not linked to this product
        # (candidates for the "add link" dropdown), from the parent env
        rm = RepositoryManager(self.env.parent)
        all_repos = rm.get_all_repositories()
        unlinked_repositories = dict([(k, all_repos[k]) for k in
                                      sorted(set(all_repos) -
                                             set(all_product_repos))])

        data = {'types': types, 'default_type': rm_product.repository_type,
                'repositories': repositories,
                'unlinked_repositories': unlinked_repositories}
        return 'repository_links.html', data

# Monkey-patch trac so the product-aware panel replaces the stock one
trac.versioncontrol.admin.RepositoryAdminPanel = ProductRepositoryAdminPanel
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/bloodhound_multiproduct/multiproduct/config.py | bloodhound_multiproduct/multiproduct/config.py | # -*- coding: UTF-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
__all__ = 'Configuration', 'Section'
import os.path
from trac.config import Configuration, ConfigurationError, Option, \
OrderedExtensionsOption, Section, _use_default
from trac.resource import ResourceNotFound
from trac.util import create_file
from trac.util.text import to_unicode
from multiproduct.model import ProductSetting
from multiproduct.perm import MultiproductPermissionPolicy
class Configuration(Configuration):
    """Product-aware settings repository equivalent to instances of
    `trac.config.Configuration` (and thus `ConfigParser` from the
    Python Standard Library) but retrieving configuration values
    from the database.
    """
    # NOTE: deliberately shadows the imported trac.config.Configuration,
    # which remains accessible as this class's base class.

    # Touch-file whose mtime signals configuration changes across processes
    CONFIG_LOCK_FILE = 'config.lock'

    def __init__(self, env, product, parents=None):
        """Initialize configuration object with an instance of
        `trac.env.Environment` and product prefix.

        Optionally it is possible to inherit settings from parent
        Configuration objects. Environment's configuration will not
        be added to parents list.
        """
        self.env = env
        self.product = to_unicode(product)
        self._sections = {}
        self._lastmtime = 0
        self._lock_path = os.path.join(self.env.path, self.CONFIG_LOCK_FILE)
        if not os.path.exists(self._lock_path):
            create_file(self._lock_path)
        self._orig_parents = parents
        self._setup_parents(parents)

    def __getitem__(self, name):
        """Return the configuration section with the specified name.
        """
        # Sections are created lazily and cached until invalidated by
        # parse_if_needed()
        if name not in self._sections:
            self._sections[name] = Section(self, name)
        return self._sections[name]

    def get_lock_file_mtime(self):
        """Returns to modification time of the lock file."""
        return os.path.getmtime(self._lock_path)

    def sections(self, compmgr=None, defaults=True):
        """Return a list of section names.

        If `compmgr` is specified, only the section names corresponding to
        options declared in components that are enabled in the given
        `ComponentManager` are returned.
        """
        sections = set(to_unicode(s)
                       for s in ProductSetting.get_sections(self.env,
                                                            self.product))
        for parent in self.parents:
            sections.update(parent.sections(compmgr, defaults=False))
        if defaults:
            sections.update(self.defaults(compmgr))
        return sorted(sections)

    def has_option(self, section, option, defaults=True):
        """Returns True if option exists in section in either the project
        trac.ini or one of the parents, or is available through the Option
        registry.

        (since Trac 0.11)
        """
        if ProductSetting.exists(self.env, self.product, section, option):
            return True
        for parent in self.parents:
            if parent.has_option(section, option, defaults=False):
                return True
        return defaults and (section, option) in Option.registry

    def save(self):
        """Just touch config lock file.

        Notice: In contrast to Trac's Configuration objects Bloodhound's
        product configuration objects commit changes to the database
        immediately. Thus there's no much to do in this method.
        """
        self.touch()
        self._lastmtime = self.get_lock_file_mtime()

    def parse_if_needed(self, force=False):
        """Invalidate options cache considering global lock timestamp.

        Notice: Opposite to Trac's Configuration objects Bloodhound's
        product configuration objects commit changes to the database
        immediately. Thus there's no much to do in this method.
        """
        changed = False
        modtime = self.get_lock_file_mtime()
        if force or modtime > self._lastmtime:
            # Another process touched the lock file: drop cached sections
            self._sections = {}
            self._lastmtime = modtime
            changed = True
        if changed:
            self._setup_parents(self._orig_parents)
        else:
            for parent in self.parents:
                changed |= parent.parse_if_needed(force=force)
        return changed

    def touch(self):
        # Bump the lock file mtime so other processes re-read settings
        if os.access(self._lock_path, os.W_OK):
            os.utime(self._lock_path, None)

    def set_defaults(self, compmgr=None):
        """Retrieve all default values and store them explicitly in the
        configuration, so that they can be saved to file.

        Values already set in the configuration are not overridden.
        """
        for section, default_options in self.defaults(compmgr).items():
            for name, value in default_options.items():
                if not ProductSetting.exists(self.env, self.product,
                                             section, name):
                    if any(parent[section].contains(name, defaults=False)
                           for parent in self.parents):
                        # A parent already provides this value
                        value = None
                    self.set(section, name, value)

    # Helper methods

    def _setup_parents(self, parents=None):
        """Inherit configuration from parent `Configuration` instances.

        If there's a value set to 'file' option in 'inherit' section then
        it will be considered as a list of paths to .ini files
        that will be added to parents list as well.
        """
        # Local import fetches the real file-based Configuration, since the
        # name is shadowed at module level by this class
        from trac import config
        self.parents = (parents or [])
        for filename in self.get('inherit', 'file').split(','):
            filename = Section._normalize_path(filename.strip(), self.env)
            self.parents.append(config.Configuration(filename))
class Section(Section):
    """Proxy for a specific configuration section.

    Objects of this class should not be instantiated directly.
    """
    # NOTE: deliberately shadows trac.config.Section (the base class)
    __slots__ = ['config', 'name', 'overridden', '_cache']

    @staticmethod
    def optionxform(optionstr):
        # Option names are case-insensitive and stored as unicode
        return to_unicode(optionstr.lower())

    def __init__(self, config, name):
        self.config = config
        self.name = to_unicode(name)
        self.overridden = {}
        self._cache = {}

    @property
    def env(self):
        return self.config.env

    @property
    def product(self):
        return self.config.product

    def contains(self, key, defaults=True):
        """Return True if `key` is set here, in a parent, or (when
        `defaults`) registered in the Option registry."""
        key = self.optionxform(key)
        if ProductSetting.exists(self.env, self.product, self.name, key):
            return True
        for parent in self.config.parents:
            if parent[self.name].contains(key, defaults=False):
                return True
        return defaults and (self.name, key) in Option.registry

    __contains__ = contains

    def iterate(self, compmgr=None, defaults=True):
        """Iterate over the options in this section.

        If `compmgr` is specified, only return default option values for
        components that are enabled in the given `ComponentManager`.
        """
        options = set()
        name_str = self.name
        for setting in ProductSetting.select(self.env,
                                             where={'product': self.product,
                                                    'section': name_str}):
            option = self.optionxform(setting.option)
            options.add(option)
            yield option
        for parent in self.config.parents:
            for option in parent[self.name].iterate(defaults=False):
                loption = self.optionxform(option)
                # De-duplicate against options already yielded
                if loption not in options:
                    options.add(loption)
                    yield option
        if defaults:
            for section, option in Option.get_registry(compmgr).keys():
                if section == self.name and \
                        self.optionxform(option) not in options:
                    yield option

    __iter__ = iterate

    def __repr__(self):
        return '<%s [%s , %s]>' % (self.__class__.__name__,
                                   self.product, self.name)

    def get(self, key, default=''):
        """Return the value of the specified option.

        Valid default input is a string. Returns a string.
        """
        key = self.optionxform(key)
        # Per-section memoization; invalidated by set() / remove()
        cached = self._cache.get(key, _use_default)
        if cached is not _use_default:
            return cached
        name_str = self.name
        key_str = to_unicode(key)
        settings = ProductSetting.select(self.env,
                                         where={'product': self.product,
                                                'section': name_str,
                                                'option': key_str})
        if len(settings) > 0:
            value = settings[0].value
        else:
            # Not set for this product: look the option up in the parents;
            # the for/else falls back to the registered default when no
            # parent provided a value (i.e. the loop did not break)
            for parent in self.config.parents:
                value = parent[self.name].get(key, _use_default)
                if value is not _use_default:
                    break
            else:
                if default is not _use_default:
                    option = Option.registry.get((self.name, key))
                    value = option.default if option else _use_default
                else:
                    value = _use_default
        if value is _use_default:
            return default
        if not value:
            value = u''
        elif isinstance(value, basestring):
            value = to_unicode(value)
        self._cache[key] = value
        return value

    def getpath(self, key, default=''):
        """Return a configuration value as an absolute path.

        Relative paths are resolved relative to `conf` subfolder
        of the target global environment. This approach is consistent
        with TracIni path resolution.

        Valid default input is a string. Returns a normalized path.

        (enabled since Trac 0.11.5)
        """
        path = self.get(key, default)
        if not path:
            return default
        return self._normalize_path(path, self.env)

    def remove(self, key):
        """Delete a key from this section.

        Like for `set()`, the changes won't persist until `save()` gets called.
        """
        key_str = self.optionxform(key)
        option_key = {
            'product': self.product,
            'section': self.name,
            'option': key_str
        }
        try:
            setting = ProductSetting(self.env, keys=option_key)
        except ResourceNotFound:
            self.env.log.warning("No record for product option %s", option_key)
        else:
            # Drop the memoized value before deleting the record
            self._cache.pop(key, None)
            setting.delete()
            self.env.log.info("Removing product option %s", option_key)

    def set(self, key, value):
        """Change a configuration value.

        These changes will be persistent right away.
        """
        key_str = self.optionxform(key)
        value_str = to_unicode(value)
        self._cache.pop(key_str, None)
        option_key = {
            'product': self.product,
            'section': self.name,
            'option': key_str,
        }
        try:
            setting = ProductSetting(self.env, option_key)
        except ResourceNotFound:
            if value is not None:
                # Insert new record in the database
                setting = ProductSetting(self.env)
                setting._data.update(option_key)
                setting._data['value'] = value_str
                self.env.log.debug('Writing option %s', setting._data)
                setting.insert()
        else:
            if value is None:
                # Delete existing record from the database
                # FIXME : Why bother with setting overriden
                self.overridden[key] = True
                setting.delete()
            else:
                # Update existing record
                setting._data['value'] = value
                setting.update()

    # Helper methods

    @staticmethod
    def _normalize_path(path, env):
        # Relative paths are anchored at <env>/conf, mirroring TracIni
        if not os.path.isabs(path):
            path = os.path.join(env.path, 'conf', path)
        return os.path.normcase(os.path.realpath(path))
#--------------------
# Option override classes
#--------------------
class ProductPermissionPolicyOption(OrderedExtensionsOption):
    """Prepend an instance of `multiproduct.perm.MultiproductPermissionPolicy`
    """
    def __get__(self, instance, owner):
        # FIXME: Better handling of recursive imports
        from multiproduct.env import ProductEnvironment

        # Accessed on the class rather than an instance: return descriptor
        if instance is None:
            return self
        components = OrderedExtensionsOption.__get__(self, instance, owner)
        env = getattr(instance, 'env', None)
        # In product context inject the multiproduct policy ahead of the
        # configured ones; otherwise behave exactly like the base option
        return [MultiproductPermissionPolicy(env)] + components \
               if isinstance(env, ProductEnvironment) \
               else components
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/bloodhound_multiproduct/multiproduct/__init__.py | bloodhound_multiproduct/multiproduct/__init__.py | # -*- coding: UTF-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from multiproduct.api import MultiProductSystem
from multiproduct.product_admin import ProductAdminPanel
from multiproduct.ticket import *
from multiproduct.web_ui import ProductModule
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/bloodhound_multiproduct/multiproduct/versioncontrol.py | bloodhound_multiproduct/multiproduct/versioncontrol.py | # -*- coding: UTF-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import os.path
from trac.util.concurrency import threading
from trac.core import implements
import trac.versioncontrol.api
from trac.versioncontrol import RepositoryManager
import trac.admin
import trac.web.api
import trac.resource
from multiproduct.util import ReplacementComponent
from multiproduct.env import ProductEnvironment
class DbRepositoryProvider(ReplacementComponent, trac.versioncontrol.api.DbRepositoryProvider):
    """Inplace replacement for trac.versioncontrol.api.DbRepositoryProvider. Filters
    repositories based on soft-links to products. Soft links are stored in 'product'
    repository attribute, separated by comma."""

    repository_attrs = trac.versioncontrol.api.DbRepositoryProvider.repository_attrs + tuple(['product'])

    implements(trac.versioncontrol.api.IRepositoryProvider,
               trac.admin.IAdminCommandProvider)

    # IRepositoryProvider methods
    def get_repositories(self):
        """Retrieve list of repositories specified in the repository
        table for current product environment
        """
        if isinstance(self.env, ProductEnvironment):
            repos = {}
            # repository_attrs is a trusted class-level constant, so the
            # interpolated IN (...) list is safe here.
            for id, name, value in self.env.db_direct_query(
                    "SELECT id, name, value FROM repository WHERE name IN (%s)"
                    % ",".join("'%s'" % each for each in self.repository_attrs)):
                if value is not None:
                    repos.setdefault(id, {})[name] = value
            reponames = {}
            for id, info in repos.iteritems():
                if not 'product' in info or \
                        not self.env.product.prefix in info['product'].split(','):
                    # skip repository if not soft linked from the current
                    # product environment
                    continue
                if 'product' in info:
                    del info['product']
                if 'name' in info and ('dir' in info or 'alias' in info):
                    info['id'] = id
                    reponames[info['name']] = info
            return reponames.iteritems()
        else:
            return super(DbRepositoryProvider, self).get_repositories()

    def _get_repository_links(self, repoid):
        """Return the list of product prefixes soft-linked to repository
        `repoid`, or None when no link record exists."""
        with self.env.db_direct_query as db:
            # Parameterized instead of string-interpolated SQL
            rows = db("""SELECT value FROM repository WHERE id=%s
                         AND name='product'""", (repoid,))
            if rows:
                return rows[0][0].split(',')
        return None

    def link_product(self, reponame):
        """Add the current product's prefix to `reponame`'s link list.

        No-op outside product context.
        """
        if not isinstance(self.env, ProductEnvironment):
            return
        rm = RepositoryManager(self.env.parent)
        repoid = rm.get_repository_id(reponame)
        links = self._get_repository_links(repoid)
        with self.env.db_direct_transaction as db:
            if links:
                links.append(self.env.product.prefix)
                db("""UPDATE repository SET value=%s WHERE id=%s
                      AND name='product'""", (','.join(links), repoid))
            else:
                # Parameterize the prefix: it is user-defined data and must
                # not be spliced into the statement text.
                db("""INSERT INTO repository (id, name, value)
                      VALUES(%s, 'product', %s)""",
                   (repoid, self.env.product.prefix))

    def unlink_product(self, reponame):
        """Remove the current product's prefix from `reponame`'s link list.

        No-op outside product context or when the repository is not
        linked to this product.
        """
        if not isinstance(self.env, ProductEnvironment):
            return
        rm = RepositoryManager(self.env.parent)
        repoid = rm.get_repository_id(reponame)
        links = self._get_repository_links(repoid)
        if not links or self.env.product.prefix not in links:
            # Nothing to unlink; previously this crashed with
            # AttributeError/ValueError when no link record existed.
            return
        links.remove(self.env.product.prefix)
        with self.env.db_direct_transaction as db:
            if len(links) > 0:
                db("""UPDATE repository SET value=%s WHERE id=%s
                      AND name='product'""", (','.join(links), repoid))
            else:
                # Last link removed: drop the record (parameterized)
                db("""DELETE FROM repository WHERE id=%s AND name='product'
                      AND value=%s""", (repoid, self.env.product.prefix))

trac.versioncontrol.api.DbRepositoryProvider = DbRepositoryProvider
trac.versioncontrol.DbRepositoryProvider = DbRepositoryProvider
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/bloodhound_multiproduct/multiproduct/dbcursor.py | bloodhound_multiproduct/multiproduct/dbcursor.py | # -*- coding: UTF-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import trac.db.util
from trac.util import concurrency
import sqlparse
import sqlparse.tokens as Tokens
import sqlparse.sql as Types
from multiproduct.cache import lru_cache
from multiproduct.util import using_sqlite_backend, using_mysql_backend
__all__ = ['BloodhoundIterableCursor', 'BloodhoundConnectionWrapper', 'ProductEnvContextManager']
# Tables shared by all products: any SQL statement touching these must be
# left untouched by the product-aware translator.
SKIP_TABLES = ['auth_cookie',
               'session', 'session_attribute',
               'cache',
               'repository', 'revision', 'node_change',
               'bloodhound_product', 'bloodhound_productresourcemap', 'bloodhound_productconfig',
               'sqlite_master', 'bloodhound_relations'
               ]
# Tables scoped per product: statements referencing these get a product
# discriminator injected by the translator.
TRANSLATE_TABLES = ['system',
                    'ticket', 'ticket_change', 'ticket_custom',
                    'attachment',
                    'enum', 'component', 'milestone', 'version',
                    'permission',
                    'wiki',
                    'report',
                    ]
# Name of the discriminator column added to translated statements
PRODUCT_COLUMN = 'product'
# Prefix value representing the global (non-product) scope
GLOBAL_PRODUCT = ''
# Singleton used to mark translator as unset
class empty_translator(object):
    """Marker type whose single instance flags 'no translator selected'."""
    pass

# Sentinel instance; compared by identity, never used as a real translator
translator_not_set = empty_translator()
@lru_cache(maxsize=1000)
def translate_sql(env, sql):
    """Rewrite `sql` for the product scope of `env`.

    When `env` is None the statement is returned unchanged (direct,
    untranslated database access). Results are memoized per (env, sql)
    by the surrounding `lru_cache`.
    """
    translator = None
    log = None
    product_prefix = None
    if env is not None:
        # Mirror trac's [trac] debug_sql switch for translation logging
        if trac.db.api.DatabaseManager(env).debug_sql:
            log = env.log
        product_prefix = env.product.prefix if env.product else GLOBAL_PRODUCT
        translator = BloodhoundProductSQLTranslate(SKIP_TABLES,
                                                   TRANSLATE_TABLES,
                                                   PRODUCT_COLUMN,
                                                   product_prefix,
                                                   env)
    if log:
        # Fixed message typo ('SQl' -> 'SQL')
        log.debug('Original SQL: %s', sql)
    realsql = translator.translate(sql) if (translator is not None) else sql
    if log:
        log.debug('SQL: %s', realsql)
    return realsql
class BloodhoundIterableCursor(trac.db.util.IterableCursor):
    """Iterable cursor that runs every statement through the product-aware
    SQL translator before execution.

    The scoping environment is published per-thread via `set_env()`.
    """
    __slots__ = trac.db.util.IterableCursor.__slots__ + ['_translator']
    # Thread-local holder for the currently active environment
    _tls = concurrency.ThreadLocal(env=None)

    def __init__(self, cursor, log=None):
        super(BloodhoundIterableCursor, self).__init__(cursor, log=log)

    def execute(self, sql, args=None):
        # Rewrite the statement for the current product before executing
        return super(BloodhoundIterableCursor, self).execute(translate_sql(self.env, sql), args=args)

    def executemany(self, sql, args=None):
        return super(BloodhoundIterableCursor, self).executemany(translate_sql(self.env, sql), args=args)

    @property
    def env(self):
        # Environment is per-thread, set by the connection/context wrappers
        return self._tls.env

    @classmethod
    def set_env(cls, env):
        cls._tls.env = env

    @classmethod
    def get_env(cls):
        return cls._tls.env

    @classmethod
    def cache_reset(cls):
        # Drop memoized SQL translations (e.g. after product changes)
        translate_sql.clear()

# replace trac.db.util.IterableCursor with BloodhoundIterableCursor
trac.db.util.IterableCursor = BloodhoundIterableCursor
class BloodhoundConnectionWrapper(object):
    """Connection proxy that pins a product environment on every query."""

    def __init__(self, connection, env):
        self.connection = connection
        self.env = env

    def __getattr__(self, name):
        # Anything not overridden here is served by the raw connection
        return getattr(self.connection, name)

    def execute(self, query, params=None):
        """Run `query` after publishing `self.env` to the SQL translator."""
        BloodhoundIterableCursor.set_env(self.env)
        return self.connection.execute(query, params=params)

    __call__ = execute

    def executemany(self, query, params=None):
        """Run `query` once per parameter set, under `self.env`."""
        BloodhoundIterableCursor.set_env(self.env)
        return self.connection.executemany(query, params=params)

    def cursor(self):
        """Return a cursor wrapper that keeps propagating `self.env`."""
        raw_cursor = self.connection.cursor()
        return BloodhoundCursorWrapper(raw_cursor, self.env)
class BloodhoundCursorWrapper(object):
    """Cursor proxy that pins a product environment on every statement."""

    def __init__(self, cursor, env):
        self.cursor = cursor
        self.env = env

    def __iter__(self):
        return self.cursor.__iter__()

    def __getattr__(self, name):
        # Fall through to the wrapped cursor for anything not overridden
        return getattr(self.cursor, name)

    def execute(self, sql, args=None):
        """Execute `sql` after publishing `self.env` to the translator."""
        BloodhoundIterableCursor.set_env(self.env)
        return self.cursor.execute(sql, args=args)

    def executemany(self, sql, args=None):
        """Execute `sql` once per args set, under `self.env`."""
        BloodhoundIterableCursor.set_env(self.env)
        return self.cursor.executemany(sql, args=args)
class ProductEnvContextManager(object):
    """Wrap an underlying database context manager so as to keep track
    of (nested) product context.
    """

    def __init__(self, context, env=None):
        """Initialize product database context.

        :param context: inner database context (e.g. `QueryContextManager`,
                        `TransactionContextManager`)
        :param env: instance of either `trac.env.Environment` or
                    `multiproduct.env.ProductEnvironment` used to scope
                    database queries; `None` disables SQL translation,
                    which is equivalent to direct database access.
        """
        self.db_context = context
        self.env = env

    def __enter__(self):
        """Enter the inner context and hand back a connection wrapper
        carrying this product environment."""
        inner_connection = self.db_context.__enter__()
        return BloodhoundConnectionWrapper(inner_connection, self.env)

    def __exit__(self, et, ev, tb):
        """Delegate exit handling to the inner database context."""
        return self.db_context.__exit__(et, ev, tb)

    def __call__(self, *args, **kwargs):
        """Invoke the inner context callable with our environment pinned."""
        BloodhoundIterableCursor.set_env(self.env)
        return self.db_context(*args, **kwargs)

    def __getattr__(self, attrnm):
        """Forward attribute access to nested database context on failure."""
        return getattr(self.db_context, attrnm)

    def execute(self, sql, params=None):
        """Execute `sql` on the inner context under our environment."""
        BloodhoundIterableCursor.set_env(self.env)
        return self.db_context.execute(sql, params=params)

    def executemany(self, sql, params=None):
        """Execute `sql` per params set on the inner context, scoped."""
        BloodhoundIterableCursor.set_env(self.env)
        return self.db_context.executemany(sql, params=params)
class BloodhoundProductSQLTranslate(object):
_join_statements = ['LEFT JOIN', 'LEFT OUTER JOIN',
'RIGHT JOIN', 'RIGHT OUTER JOIN',
'JOIN', 'INNER JOIN']
_from_end_words = ['WHERE', 'GROUP', 'HAVING', 'ORDER', 'UNION', 'LIMIT']
    def __init__(self, skip_tables, translate_tables, product_column, product_prefix, env=None):
        """Configure the SQL translator.

        :param skip_tables: table names left untouched by translation
        :param translate_tables: tables narrowed per-product via the
                                 product column
        :param product_column: name of the column holding the product prefix
        :param product_prefix: prefix of the product being scoped
        :param env: environment used to detect the database backend;
                    ``None`` behaves like the sqlite/mysql case
        """
        self._skip_tables = skip_tables
        self._translate_tables = translate_tables
        self._product_column = product_column
        self._product_prefix = product_prefix
        # Backends without usable sequences compute the ticket id inline
        # in SQL at INSERT time (see _insert).
        self._id_calculated = env is None or using_sqlite_backend(env) \
                              or using_mysql_backend(env)
    def _sqlparse_underline_hack(self, token):
        """Work around sqlparse tokenizing a leading underscore as a
        stand-alone Error token: collect runs of '_' and re-attach them as
        a prefix of the identifier that follows.

        Returns a trailing underscore token that must be pushed up to the
        parent when the expression *ends* with '_', otherwise ``None``.
        """
        underline_token = lambda token: token.ttype == Tokens.Token.Error and token.value == '_'
        identifier_token = lambda token: isinstance(token, Types.Identifier) or isinstance(token, Types.Token)
        def prefix_token(token, prefix):
            # Prepend collected underscores to the identifier value and
            # keep ``normalized`` consistent with sqlparse conventions.
            if identifier_token(token):
                if isinstance(token, Types.IdentifierList):
                    token = token.tokens[0]
                token.value = prefix + token.value
                token.normalized = token.value.upper() if token.ttype in Tokens.Keyword \
                                   else token.value
                if hasattr(token, 'tokens'):
                    if len(token.tokens) != 1:
                        raise Exception("Internal error, invalid token list")
                    token.tokens[0].value, token.tokens[0].normalized = token.value, token.normalized
            return
        if hasattr(token, 'tokens') and token.tokens and len(token.tokens):
            current = self._token_first(token)
            while current:
                leftover = None
                if underline_token(current):
                    prefix = ''
                    while underline_token(current):
                        prefix += current.value
                        prev = current
                        current = self._token_next(token, current)
                        self._token_delete(token, prev)
                    # expression ends with _ ... push the token to parent
                    if not current:
                        return prev
                    prefix_token(current, prefix)
                else:
                    # Recurse into grouped tokens; re-insert any trailing
                    # underscore bubbled up from the child group.
                    leftover = self._sqlparse_underline_hack(current)
                    if leftover:
                        leftover.parent = token
                        self._token_insert_after(token, current, leftover)
                current = leftover if leftover else self._token_next(token, current)
        return None
def _select_table_name_alias(self, tokens):
return filter(lambda t: t.upper() != 'AS', [t.value for t in tokens if t.value.strip()])
def _column_expression_name_alias(self, tokens):
return filter(lambda t: t.upper() != 'AS', [t.value for t in tokens if t.value.strip()])
def _select_alias_sql(self, alias):
return ' AS %s' % alias
def _translated_table_view_sql(self, name, alias=None):
sql = "(SELECT * FROM %s WHERE %s='%s')" % (name, self._product_column, self._product_prefix)
if alias:
sql += self._select_alias_sql(alias)
return sql
def _prefixed_table_entity_name(self, tablename):
return '"%s_%s"' % (self._product_prefix, tablename) if self._product_prefix else tablename
def _prefixed_table_view_sql(self, name, alias):
return '(SELECT * FROM %s) AS %s' % (self._prefixed_table_entity_name(name),
alias)
    def _token_first(self, parent):
        # First child token of *parent* (sqlparse TokenList API).
        return parent.token_first()
    def _token_next_match(self, parent, start_token, ttype, token):
        # Next token at/after *start_token* matching *ttype*/*token* value.
        return parent.token_next_match(self._token_idx(parent, start_token), ttype, token)
    def _token_next(self, parent, from_token):
        # Token following *from_token* within *parent*.
        return parent.token_next(self._token_idx(parent, from_token))
    def _token_prev(self, parent, from_token):
        # Token preceding *from_token* within *parent*.
        return parent.token_prev(self._token_idx(parent, from_token))
    def _token_next_by_instance(self, parent, start_token, klass):
        # Next token after *start_token* that is an instance of *klass*.
        return parent.token_next_by_instance(self._token_idx(parent, start_token), klass)
    def _token_next_by_type(self, parent, start_token, ttype):
        # Next token after *start_token* with token type *ttype*.
        return parent.token_next_by_type(self._token_idx(parent, start_token), ttype)
    def _token_insert_before(self, parent, where, token):
        # Insert *token* into *parent* immediately before *where*.
        return parent.insert_before(where, token)
    def _token_insert_after(self, parent, where, token):
        # Insert *token* into *parent* immediately after *where*.
        return parent.insert_after(where, token)
    def _token_idx(self, parent, token):
        # Positional index of *token* in parent's child list.
        return parent.token_index(token)
    def _token_delete(self, parent, token):
        # Remove *token* from parent's child list; return its old index
        # so callers can re-insert replacements at the same position.
        idx = self._token_idx(parent, token)
        del parent.tokens[idx]
        return idx
    def _token_insert(self, parent, idx, token):
        # Insert *token* at position *idx* in parent's child list.
        parent.tokens.insert(idx, token)
    def _eval_expression_value(self, parent, token):
        """Translate a ``(SELECT ...)`` subquery appearing as a value
        inside an expression (e.g. in WHERE conditions or VALUES lists);
        any other token is left untouched."""
        if isinstance(token, Types.Parenthesis):
            t = self._token_first(token)
            if t.match(Tokens.Punctuation, '('):
                t = self._token_next(token, t)
                if t.match(Tokens.DML, 'SELECT'):
                    self._select(token, t)
    def _expression_token_unwind_hack(self, parent, token, start_token):
        # hack to workaround sqlparse bug that wrongly presents list of tokens
        # as IdentifierList in certain situations
        if isinstance(token, Types.IdentifierList):
            # Splice the bogus IdentifierList's children back into the
            # parent stream at the same position, then resume scanning
            # from the token after *start_token*.
            idx = self._token_delete(parent, token)
            for t in token.tokens:
                self._token_insert(parent, idx, t)
                idx += 1
            token = self._token_next(parent, start_token)
        return token
    def _where(self, parent, where_token):
        """Walk a WHERE clause and translate any embedded subqueries via
        ``_eval_expression_value``."""
        if isinstance(where_token, Types.Where):
            token = self._token_first(where_token)
            if not token.match(Tokens.Keyword, 'WHERE'):
                raise Exception("Invalid WHERE statement")
            while token:
                self._eval_expression_value(where_token, token)
                token = self._token_next(where_token, token)
        return
    def _select_expression_tokens(self, parent, first_token, end_words):
        """Collect SELECT column expressions starting at *first_token*.

        Returns ``(stop_token, lists)`` where *lists* groups the tokens
        of each comma-separated column and *stop_token* is the keyword
        from *end_words* (or ``None``) that terminated the scan.
        """
        if isinstance(first_token, Types.IdentifierList):
            # sqlparse already grouped the whole column list for us.
            return first_token, [list(first_token.flatten())]
        tokens = list()
        current_list = list()
        current_token = first_token
        while current_token and not current_token.match(Tokens.Keyword, end_words):
            if current_token.match(Tokens.Punctuation, ','):
                # Column separator: close the current expression group.
                if current_list:
                    tokens.append(current_list)
                    current_list = list()
            elif current_token.is_whitespace():
                pass
            else:
                current_list.append(current_token)
            current_token = self._token_next(parent, current_token)
        if current_list:
            tokens.append(current_list)
        return current_token, tokens
    def _select_join(self, parent, start_token, end_words):
        """Translate the table reference of a JOIN clause and skip over
        its ON condition, returning the first token after it."""
        # Aliases are forced so the ON condition keeps referencing a
        # valid name after the table is replaced by a sub-select.
        current_token = self._select_from(parent, start_token, ['ON'], force_alias=True)
        tokens = list()
        if current_token:
            current_token = self._token_next(parent, current_token)
            while current_token and \
                    not current_token.match(Tokens.Keyword, end_words) and \
                    not isinstance(current_token, Types.Where):
                tokens.append(current_token)
                current_token = self._token_next(parent, current_token)
        return current_token
    def _select_from(self, parent, start_token, end_words, table_name_callback=None, force_alias=False):
        """Rewrite the table references of a FROM clause.

        Tables in ``_translate_tables`` become product-narrowed
        sub-selects, other tables get their product-prefixed name;
        tables in ``_skip_tables`` are left alone.  *table_name_callback*
        is invoked with each rewritten table name.  Returns the token
        that terminated the clause (an *end_words* keyword or WHERE).
        """
        def inject_table_view(token, name, alias):
            # Replace the table token in place with the suitable view SQL.
            if name in self._skip_tables:
                pass
            elif name in self._translate_tables:
                if force_alias and not alias:
                    alias = name
                parent.tokens[self._token_idx(parent, token)] = sqlparse.parse(self._translated_table_view_sql(name,
                                                                                                               alias=alias))[0]
                if table_name_callback:
                    table_name_callback(name)
            else:
                if not alias:
                    alias = name
                parent.tokens[self._token_idx(parent, token)] = sqlparse.parse(self._prefixed_table_view_sql(name,
                                                                                                             alias))[0]
                if table_name_callback:
                    table_name_callback(name)
        def inject_table_alias(token, alias):
            parent.tokens[self._token_idx(parent, token)] = sqlparse.parse(self._select_alias_sql(alias))[0]
        def process_table_name_tokens(nametokens):
            # Collapse accumulated name/alias tokens into one table view;
            # always returns a fresh (empty) accumulator list.
            if nametokens:
                l = self._select_table_name_alias(nametokens)
                if not l:
                    raise Exception("Invalid FROM table name")
                name, alias = l[0], None
                alias = l[1] if len(l) > 1 else name
                if not name in self._skip_tables:
                    token = nametokens[0]
                    for t in nametokens[1:]:
                        self._token_delete(parent, t)
                    inject_table_view(token, name, alias)
            return list()
        current_token = self._token_next(parent, start_token)
        prev_token = start_token
        table_name_tokens = list()
        join_tokens = list()
        while current_token and \
                not current_token.match(Tokens.Keyword, end_words) and \
                not isinstance(current_token, Types.Where):
            next_token = self._token_next(parent, current_token)
            if current_token.is_whitespace():
                pass
            elif isinstance(current_token, Types.IdentifierList):
                # sqlparse bug workaround, see _expression_token_unwind_hack
                current_token = self._expression_token_unwind_hack(parent, current_token, prev_token)
                continue
            elif isinstance(current_token, Types.Identifier):
                parenthesis = filter(lambda t: isinstance(t, Types.Parenthesis), current_token.tokens)
                if parenthesis:
                    # Aliased subselect: translate it recursively.
                    for p in parenthesis:
                        t = self._token_next(p, self._token_first(p))
                        if not t.match(Tokens.DML, 'SELECT'):
                            raise Exception("Invalid subselect statement")
                        self._select(p, t)
                else:
                    tablename = current_token.value.strip()
                    tablealias = current_token.get_name().strip()
                    if tablename == tablealias:
                        # Bare name; alias may still follow in the stream.
                        table_name_tokens.append(current_token)
                    else:
                        inject_table_view(current_token, tablename, tablealias)
            elif isinstance(current_token, Types.Parenthesis):
                t = self._token_next(current_token, self._token_first(current_token))
                if t.match(Tokens.DML, 'SELECT'):
                    # (SELECT ...) [AS] alias
                    identifier_token = self._token_next(parent, current_token)
                    as_token = None
                    if identifier_token.match(Tokens.Keyword, 'AS'):
                        as_token = identifier_token
                        identifier_token = self._token_next(parent, identifier_token)
                    if not isinstance(identifier_token, Types.Identifier):
                        raise Exception("Invalid subselect statement")
                    next_token = self._token_next(parent, identifier_token)
                    self._select(current_token, t)
                    if as_token:
                        self._token_delete(parent, as_token)
                    inject_table_alias(identifier_token, identifier_token.value)
            elif current_token.ttype == Tokens.Punctuation:
                # Comma between table references ends the current name.
                if table_name_tokens:
                    next_token = self._token_next(parent, current_token)
                    table_name_tokens = process_table_name_tokens(table_name_tokens)
            elif current_token.match(Tokens.Keyword, ['JOIN', 'LEFT', 'RIGHT', 'INNER', 'OUTER'] + self._join_statements):
                # Accumulate JOIN keywords until a full join statement forms.
                join_tokens.append(current_token.value.strip().upper())
                join = ' '.join(join_tokens)
                if join in self._join_statements:
                    join_tokens = list()
                    table_name_tokens = process_table_name_tokens(table_name_tokens)
                    next_token = self._select_join(parent,
                                                   current_token,
                                                   ['JOIN', 'LEFT', 'RIGHT', 'INNER', 'OUTER']
                                                   + self._join_statements
                                                   + self._from_end_words)
            elif current_token.ttype == Tokens.Keyword or \
                    current_token.ttype == Tokens.Token.Literal.Number.Integer:
                table_name_tokens.append(current_token)
            else:
                raise Exception("Failed to parse FROM table name")
            prev_token = current_token
            current_token = next_token
        if prev_token:
            process_table_name_tokens(table_name_tokens)
        return current_token
    def _select(self, parent, start_token, insert_table=None):
        """Translate a SELECT statement rooted at *start_token*.

        When *insert_table* names a translated table (INSERT ... SELECT),
        the product prefix literal is appended to the selected columns so
        inserted rows carry the right product value.
        """
        token = self._token_next(parent, start_token)
        fields_token = self._token_next(parent, token) if token.match(Tokens.Keyword, ['ALL', 'DISTINCT']) else token
        current_token, field_lists = self._select_expression_tokens(parent, fields_token, ['FROM'] + self._from_end_words)
        def handle_insert_table(table_name):
            # Append ", '<product_prefix>'" after the last selected column.
            if insert_table and insert_table in self._translate_tables:
                if not field_lists or not field_lists[-1]:
                    raise Exception("Invalid SELECT field list")
                last_token = list(field_lists[-1][-1].flatten())[-1]
                for keyword in ["'", self._product_prefix, "'", ' ', ',']:
                    self._token_insert_after(last_token.parent, last_token, Types.Token(Tokens.Keyword, keyword))
            return
        table_name_callback = handle_insert_table if insert_table else None
        from_token = self._token_next_match(parent, start_token, Tokens.Keyword, 'FROM')
        if not from_token:
            # FROM not always required, example would be SELECT CURRVAL('"ticket_id_seq"')
            return current_token
        current_token = self._select_from(parent,
                                          from_token, self._from_end_words,
                                          table_name_callback=table_name_callback)
        if not current_token:
            return None
        # Process trailing clauses (WHERE/GROUP/HAVING/ORDER/LIMIT/UNION).
        while current_token:
            if isinstance(current_token, Types.Where) or \
                    current_token.match(Tokens.Keyword, ['GROUP', 'HAVING', 'ORDER', 'LIMIT']):
                if isinstance(current_token, Types.Where):
                    self._where(parent, current_token)
                start_token = self._token_next(parent, current_token)
                next_token = self._token_next_match(parent,
                                                    start_token,
                                                    Tokens.Keyword,
                                                    self._from_end_words) if start_token else None
            elif current_token.match(Tokens.Keyword, ['UNION']):
                token = self._token_next(parent, current_token)
                if not token.match(Tokens.DML, 'SELECT'):
                    raise Exception("Invalid SELECT UNION statement")
                # Recursively translate the SELECT to the right of UNION.
                token = self._select(parent, current_token, insert_table=insert_table)
                next_token = self._token_next(parent, token) if token else None
            else:
                raise Exception("Unsupported SQL statement")
            current_token = next_token
        return current_token
    def _replace_table_entity_name(self, parent, token, table_name, entity_name=None):
        """Substitute *token* with the product-prefixed *entity_name*
        (defaulting to *table_name*) unless the table is skipped or
        translated.  Returns the token following *token*."""
        if not entity_name:
            entity_name = table_name
        next_token = self._token_next(parent, token)
        if not table_name in self._skip_tables + self._translate_tables:
            token_to_replace = parent.tokens[self._token_idx(parent, token)]
            if isinstance(token_to_replace, Types.Function):
                # e.g. "tablename(col, ...)" parses as a Function token.
                t = self._token_first(token_to_replace)
                if isinstance(t, Types.Identifier):
                    token_to_replace.tokens[self._token_idx(token_to_replace, t)] = Types.Token(Tokens.Keyword,
                                                                                               self._prefixed_table_entity_name(entity_name))
            elif isinstance(token_to_replace, Types.Identifier) or isinstance(token_to_replace, Types.Token):
                parent.tokens[self._token_idx(parent, token_to_replace)] = Types.Token(Tokens.Keyword,
                                                                                      self._prefixed_table_entity_name(entity_name))
            else:
                raise Exception("Internal error, invalid table entity token type")
        return next_token
    def _insert(self, parent, start_token):
        """Translate an INSERT statement: prefix the table name, add the
        product column (and, when ``_id_calculated``, a computed ticket
        ``id``) to the column list and to the VALUES/SELECT payload."""
        token = self._token_next(parent, start_token)
        if not token.match(Tokens.Keyword, 'INTO'):
            raise Exception("Invalid INSERT statement")
        def insert_extra_columns(tablename, columns_token):
            # Extend the explicit column list; returns which of the extra
            # columns were already present so values are not added twice.
            columns_present = []
            if tablename in self._translate_tables and \
                    isinstance(columns_token, Types.Parenthesis):
                ptoken = self._token_first(columns_token)
                if not ptoken.match(Tokens.Punctuation, '('):
                    raise Exception("Invalid INSERT statement, expected parenthesis around columns")
                ptoken = self._token_next(columns_token, ptoken)
                last_token = ptoken
                while ptoken:
                    if isinstance(ptoken, Types.IdentifierList):
                        if not 'product' in columns_present \
                                and any(i.get_name() == 'product'
                                        for i in ptoken.get_identifiers()
                                        if isinstance(i, Types.Identifier)):
                            columns_present.append('product')
                        elif not 'id' in columns_present \
                                and tablename == 'ticket' \
                                and isinstance(ptoken, Types.IdentifierList) \
                                and any((t.ttype is None or t.is_keyword)
                                        and t.value == 'id'
                                        for t in ptoken.get_identifiers()):
                            columns_present.append('id')
                    last_token = ptoken
                    ptoken = self._token_next(columns_token, ptoken)
                if not last_token or \
                        not last_token.match(Tokens.Punctuation, ')'):
                    raise Exception("Invalid INSERT statement, unable to find column parenthesis end")
                columns_to_insert = []
                if not 'product' in columns_present:
                    columns_to_insert += [',', ' ', self._product_column]
                if self._id_calculated \
                        and tablename == 'ticket'\
                        and not 'id' in columns_present:
                    columns_to_insert += [',', ' ', 'id']
                for keyword in columns_to_insert:
                    self._token_insert_before(columns_token, last_token, Types.Token(Tokens.Keyword, keyword))
            return columns_present
        def insert_extra_column_values(tablename, ptoken, before_token,
                                       columns_present):
            # Append the product prefix literal (and computed ticket id)
            # to a VALUES tuple, mirroring insert_extra_columns above.
            if tablename in self._translate_tables:
                values_to_insert = []
                if not 'product' in columns_present:
                    values_to_insert += [',', "'", self._product_prefix, "'"]
                if self._id_calculated \
                        and tablename == 'ticket' \
                        and not 'id' in columns_present:
                    values_to_insert += [
                        ',', """COALESCE((SELECT MAX(id) FROM
                                (SELECT * FROM ticket WHERE product='%s')
                                AS subquery), 0)+1""" %
                             (self._product_prefix,)
                    ]
                for keyword in values_to_insert:
                    self._token_insert_before(ptoken, before_token, Types.Token(Tokens.Keyword, keyword))
            return
        tablename = None
        table_name_token = self._token_next(parent, token)
        columns_present = []
        if isinstance(table_name_token, Types.Function):
            # "table(col1, col2, ...)" parses as a function call.
            token = self._token_first(table_name_token)
            if isinstance(token, Types.Identifier):
                tablename = token.get_name()
                columns_token = self._replace_table_entity_name(table_name_token, token, tablename)
                if columns_token.match(Tokens.Keyword, 'VALUES'):
                    token = columns_token
                else:
                    columns_present = insert_extra_columns(tablename, columns_token)
                    token = self._token_next(parent, table_name_token)
        else:
            tablename = table_name_token.value
            columns_token = self._replace_table_entity_name(parent, table_name_token, tablename)
            if columns_token.match(Tokens.Keyword, 'VALUES'):
                token = columns_token
            else:
                columns_present = insert_extra_columns(tablename, columns_token)
                token = self._token_next(parent, columns_token)
        if token.match(Tokens.Keyword, 'VALUES'):
            separators = [',', '(', ')']
            token = self._token_next(parent, token)
            while token:
                if isinstance(token, Types.Parenthesis):
                    ptoken = self._token_first(token)
                    if not ptoken.match(Tokens.Punctuation, '('):
                        raise Exception("Invalid INSERT statement")
                    last_token = ptoken
                    while ptoken:
                        if not ptoken.match(Tokens.Punctuation, separators) and \
                                not ptoken.match(Tokens.Keyword, separators) and \
                                not ptoken.is_whitespace():
                            # Translate subselects used as column values.
                            ptoken = self._expression_token_unwind_hack(token, ptoken, self._token_prev(token, ptoken))
                            self._eval_expression_value(token, ptoken)
                        last_token = ptoken
                        ptoken = self._token_next(token, ptoken)
                    if not last_token or \
                            not last_token.match(Tokens.Punctuation, ')'):
                        raise Exception("Invalid INSERT statement, unable to find column value parenthesis end")
                    insert_extra_column_values(tablename, token, last_token, columns_present)
                elif not token.match(Tokens.Punctuation, separators) and\
                        not token.match(Tokens.Keyword, separators) and\
                        not token.is_whitespace():
                    raise Exception("Invalid INSERT statement, unable to parse VALUES section")
                token = self._token_next(parent, token)
        elif token.match(Tokens.DML, 'SELECT'):
            # INSERT ... SELECT: the SELECT grows the product value column.
            self._select(parent, token, insert_table=tablename)
        else:
            raise Exception("Invalid INSERT statement")
        return
def _update_delete_where_limit(self, table_name, parent, start_token):
if not start_token:
return
where_token = start_token if isinstance(start_token, Types.Where) \
else self._token_next_by_instance(parent, start_token, Types.Where)
if where_token:
self._where(parent, where_token)
if not table_name in self._translate_tables:
return
if where_token:
keywords = [self._product_column, '=', "'", self._product_prefix, "'", ' ', 'AND', ' ']
keywords.reverse()
token = self._token_first(where_token)
if not token.match(Tokens.Keyword, 'WHERE'):
token = self._token_next_match(where_token, token, Tokens.Keyword, 'WHERE')
if not token:
raise Exception("Invalid UPDATE statement, failed to parse WHERE")
for keyword in keywords:
self._token_insert_after(where_token, token, Types.Token(Tokens.Keyword, keyword))
else:
keywords = ['WHERE', ' ', self._product_column, '=', "'", self._product_prefix, "'"]
limit_token = self._token_next_match(parent, start_token, Tokens.Keyword, 'LIMIT')
if limit_token:
for keyword in keywords:
self._token_insert_before(parent, limit_token, Types.Token(Tokens.Keyword, keyword))
self._token_insert_before(parent, limit_token, Types.Token(Tokens.Keyword, ' '))
else:
last_token = token = start_token
while token:
last_token = token
token = self._token_next(parent, token)
keywords.reverse()
for keyword in keywords:
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | true |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/bloodhound_multiproduct/multiproduct/cache.py | bloodhound_multiproduct/multiproduct/cache.py | #
# LRU and LFU cache decorators - licensed under the PSF License
# Developed by Raymond Hettinger
# (http://code.activestate.com/recipes/498245-lru-and-lfu-cache-decorators/)
#
# March 13, 2013 updated by Olemis Lang
# Added keymap arg to build custom keys out of actual args
# March 14, 2013 updated by Olemis Lang
# Keep cache consistency on user function failure
import collections
import functools
from itertools import ifilterfalse
from heapq import nsmallest
from operator import itemgetter
class Counter(dict):
    """Mapping whose absent keys read as zero (the lookup does not
    insert the key)."""
    def __missing__(self, key):
        # dict.__getitem__ calls this for absent keys; returning 0 makes
        # ``c[k] += 1`` work without prior initialization.
        return 0
def lru_cache(maxsize=100, keymap=None):
    '''Least-recently-used cache decorator.

    Arguments to the cached function must be hashable.
    Cache performance statistics stored in f.hits and f.misses.
    Clear the cache with f.clear().
    http://en.wikipedia.org/wiki/Cache_algorithms#Least_Recently_Used

    :param keymap: build custom keys out of actual arguments.
                   Its signature will be lambda (args, kwds, kwd_mark)
    '''
    maxqueue = maxsize * 10
    def decorating_function(user_function,
            len=len, iter=iter, tuple=tuple, sorted=sorted, KeyError=KeyError):
        cache = {}                   # mapping of args to results
        queue = collections.deque()  # order that keys have been used
        refcount = Counter()         # times each key is in the queue
        sentinel = object()          # marker for looping around the queue
        kwd_mark = object()          # separate positional and keyword args

        # lookup optimizations (ugly but fast)
        queue_append, queue_popleft = queue.append, queue.popleft
        queue_appendleft, queue_pop = queue.appendleft, queue.pop

        @functools.wraps(user_function)
        def wrapper(*args, **kwds):
            # cache key records both positional and keyword args
            if keymap is None:
                key = args
                if kwds:
                    key += (kwd_mark,) + tuple(sorted(kwds.items()))
            else:
                key = keymap(args, kwds, kwd_mark)

            # get cache entry or compute if not found
            try:
                result = cache[key]
                wrapper.hits += 1

                # record recent use of this key
                queue_append(key)
                refcount[key] += 1
            except KeyError:
                # Compute first: if user_function raises, the cache and
                # bookkeeping structures are left untouched (the original
                # `try/except: raise/else` dance achieved the same thing
                # with a no-op exception handler).
                result = user_function(*args, **kwds)

                # record recent use of this key
                queue_append(key)
                refcount[key] += 1
                cache[key] = result
                wrapper.misses += 1

                # purge least recently used cache entry
                if len(cache) > maxsize:
                    key = queue_popleft()
                    refcount[key] -= 1
                    while refcount[key]:
                        key = queue_popleft()
                        refcount[key] -= 1
                    del cache[key], refcount[key]

            # periodically compact the queue by eliminating duplicate keys
            # while preserving order of most recent access.  A generator is
            # used instead of itertools.ifilterfalse so the code also runs
            # on Python 3, where ifilterfalse was removed.
            if len(queue) > maxqueue:
                refcount.clear()
                queue_appendleft(sentinel)
                for key in (k for k in iter(queue_pop, sentinel)
                            if k not in refcount):
                    queue_appendleft(key)
                    refcount[key] = 1
            return result

        def clear():
            """Reset cache contents and hit/miss statistics."""
            cache.clear()
            queue.clear()
            refcount.clear()
            wrapper.hits = wrapper.misses = 0

        wrapper.hits = wrapper.misses = 0
        wrapper.clear = clear
        return wrapper
    return decorating_function
def lfu_cache(maxsize=100, keymap=None):
    '''Least-frequenty-used cache decorator.

    Arguments to the cached function must be hashable.
    Cache performance statistics stored in f.hits and f.misses.
    Clear the cache with f.clear().
    http://en.wikipedia.org/wiki/Least_Frequently_Used

    :param keymap: build custom keys out of actual arguments.
                   Its signature will be lambda (args, kwds, kwd_mark)
    '''
    def decorating_function(user_function):
        cache = {}              # mapping of args to results
        use_count = Counter()   # times each key has been accessed
        kwd_mark = object()     # separate positional and keyword args

        @functools.wraps(user_function)
        def wrapper(*args, **kwds):
            if keymap is None:
                key = args
                if kwds:
                    key += (kwd_mark,) + tuple(sorted(kwds.items()))
            else:
                key = keymap(args, kwds, kwd_mark)
            use_count[key] += 1

            # get cache entry or compute if not found
            try:
                result = cache[key]
                wrapper.hits += 1
            except KeyError:
                result = user_function(*args, **kwds)
                cache[key] = result
                wrapper.misses += 1

                # purge least frequently used cache entries.  items()
                # instead of the Python 2-only iteritems() keeps this
                # working on Python 3; max(1, ...) guarantees progress
                # for maxsize < 10 (``maxsize // 10`` would purge nothing
                # and let the cache grow without bound).
                if len(cache) > maxsize:
                    for key, _ in nsmallest(max(1, maxsize // 10),
                                            use_count.items(),
                                            key=itemgetter(1)):
                        del cache[key], use_count[key]
            return result

        def clear():
            """Reset cache contents and hit/miss statistics."""
            cache.clear()
            use_count.clear()
            wrapper.hits = wrapper.misses = 0

        wrapper.hits = wrapper.misses = 0
        wrapper.clear = clear
        return wrapper
    return decorating_function
#----------------------
# Helper functions
#----------------------
def default_keymap(args, kwds, kwd_mark):
    """Build the standard cache key: the positional args, optionally
    followed by the marker object and the keyword items in sorted order."""
    if not kwds:
        return args
    return args + (kwd_mark,) + tuple(sorted(kwds.items()))
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/bloodhound_multiproduct/multiproduct/hooks.py | bloodhound_multiproduct/multiproduct/hooks.py | # -*- coding: UTF-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# these imports monkey patch classes required to enable
# multi product support
import re
from trac.hooks import EnvironmentFactoryBase, RequestFactoryBase
from trac.web.href import Href
from trac.web.main import RequestWithSession
import multiproduct.env
import multiproduct.dbcursor
import multiproduct.ticket.batch
import multiproduct.ticket.query
import multiproduct.versioncontrol
# Matches /products/<prefix><rest>: group 'pid' is the product prefix and
# group 'pathinfo' the remainder of the path within that product.
PRODUCT_RE = re.compile(r'^/products(?:/(?P<pid>[^/]*)(?P<pathinfo>.*))?')
class MultiProductEnvironmentFactory(EnvironmentFactoryBase):
    """Create product environments out of the URL of incoming requests.

    Requests below ``/products/<pid>`` are served by the corresponding
    product environment; the WSGI environ is shifted so the product
    prefix becomes part of ``SCRIPT_NAME``.
    """
    def open_environment(self, environ, env_path, global_env, use_cache=False):
        """Return the product environment matching ``PATH_INFO``, the
        global environment when the product cannot be looked up, or
        ``None`` when the request is not product-specific.
        """
        # clearing product environment cache - bh:ticket:613
        multiproduct.env.ProductEnvironment.clear_env_cache()
        environ.setdefault('SCRIPT_NAME', '')  # bh:ticket:594

        env = pid = product_path = None
        path_info = environ.get('PATH_INFO')
        if not path_info:
            return env
        m = PRODUCT_RE.match(path_info)
        if m:
            pid = m.group('pid')
            product_path = m.group('pathinfo') or ''

        def create_product_env(product_prefix, script_name, path_info):
            if not global_env._abs_href:
                # make sure global environment absolute href is set before
                # instantiating product environment. This would normally
                # happen from within trac.web.main.dispatch_request
                req = RequestWithSession(environ, None)
                global_env._abs_href = req.abs_href
            try:
                env = multiproduct.env.ProductEnvironment(global_env,
                                                          product_prefix)
            except LookupError:
                # bh:ticket:561 - Display product list and warning message
                env = global_env
            else:
                # shift WSGI environment to the left
                environ['SCRIPT_NAME'] = script_name
                environ['PATH_INFO'] = path_info
            return env

        if pid and not (product_path in ('', '/') and
                        environ.get('QUERY_STRING')):
            env = create_product_env(pid,
                                     environ['SCRIPT_NAME'] + '/products/' +
                                     pid, product_path)
        # env stays None for paths outside /products/<pid>; calling
        # parse_if_needed() unconditionally would raise AttributeError.
        if env is not None:
            env.config.parse_if_needed()
        return env
class ProductizedHref(Href):
    """Href that routes global (non product-specific) paths through the
    global environment's href while everything else resolves relative to
    the product base.
    """
    # First path components that always belong to the global environment.
    PATHS_NO_TRANSFORM = ['chrome', 'login', 'logout', 'prefs', 'products',
                          'register', 'reset_password', 'verify_email']
    # Static resources are served by the global environment as well.
    STATIC_PREFIXES = ['css/', 'img/', 'js/']

    def __init__(self, global_href, base):
        self.super = super(ProductizedHref, self)
        self.super.__init__(base)
        self._global_href = global_href

    def __call__(self, *args, **kwargs):
        if args and isinstance(args[0], basestring):
            # any() instead of the previous filter(...) truthiness test:
            # on Python 3 a filter object is always truthy, which would
            # wrongly send every path through the global href.
            if args[0] in self.PATHS_NO_TRANSFORM or \
                    (len(args) == 1 and args[0] == 'admin') or \
                    any(args[0].startswith(prefix)
                        for prefix in self.STATIC_PREFIXES):
                return self._global_href(*args, **kwargs)
        return self.super.__call__(*args, **kwargs)
class ProductRequestWithSession(RequestWithSession):
    """Session request that generates product-aware hrefs when bound to
    a product environment."""
    def __init__(self, env, environ, start_response):
        super(ProductRequestWithSession, self).__init__(environ, start_response)
        self.base_url = env.base_url
        if isinstance(env, multiproduct.env.ProductEnvironment):
            # Global paths (chrome, auth, static files, ...) must resolve
            # against the parent environment, everything else against the
            # product base.
            self.href = ProductizedHref(env.parent.href, env.href.base)
            self.abs_href = ProductizedHref(env.parent.abs_href,
                                            env.abs_href.base)
class ProductRequestFactory(RequestFactoryBase):
    """Instantiate product-aware requests whenever an environment is
    available, falling back to plain session requests otherwise."""
    def create_request(self, env, environ, start_response):
        if env:
            return ProductRequestWithSession(env, environ, start_response)
        return RequestWithSession(environ, start_response)
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/bloodhound_multiproduct/multiproduct/perm.py | bloodhound_multiproduct/multiproduct/perm.py | # -*- coding: UTF-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Permission components for Bloodhound product environments"""
from functools import wraps
from trac.core import Component, implements
from trac.perm import IPermissionPolicy, PermissionSystem, PermissionError
# NOTE(review): ``__all__`` exports 'ProductPermissionPolicy' but the policy
# class defined below is ``MultiproductPermissionPolicy``; unless an alias is
# defined elsewhere in this module, ``from multiproduct.perm import *`` would
# fail with AttributeError -- confirm against the rest of the file.
__all__ = 'ProductPermissionPolicy',
#--------------------------
# Permission components
#--------------------------
class MultiproductPermissionPolicy(Component):
    """Apply product policy in product environments to deal with TRAC_ADMIN,
    PRODUCT_ADMIN and alike.
    """
    implements(IPermissionPolicy)

    # IPermissionPolicy methods
    def check_permission(self, action, username, resource, perm):
        """Decide *action* for *username* in a product environment.

        Returns True to grant, or None to defer to the next policy;
        outside product environments it always defers (implicit None).
        """
        # FIXME: Better handling of recursive imports
        from multiproduct.env import ProductEnvironment

        if isinstance(self.env, ProductEnvironment):
            permsys = PermissionSystem(self.env.parent)
            if permsys.check_permission('TRAC_ADMIN', username):
                # Global TRAC_ADMIN is granted every action known to the
                # product environment's permission system.
                return action in PermissionSystem(self.env).get_actions() \
                       or None # FIXME: maybe False is better
            elif username == self.env.product.owner:
                # Product owner granted with PRODUCT_ADMIN permission ootb
                permsys = PermissionSystem(self.env)
                # FIXME: would `action != 'TRAC_ADMIN'` be enough ?
                return True if action in permsys.get_actions() and \
                               action != 'TRAC_ADMIN' \
                            else None
#--------------------------
# Impersonation helpers
#--------------------------
class SudoPermissionContext(object):
"""Allows a permitted user (by default `PRODUCT_ADMIN`) to execute
a command as if temporarily granted with `TRAC_ADMIN` or other specific
permission. There is also support to revoke some actions unconditionally.
These objects will act as context managers wrapping the permissions cache
of the target request object. Entering the same context more than once
is not supported and will result in unexpected behavior.
"""
    def __init__(self, req, require=None, grant=None, revoke=None):
        """Bind the sudo context to *req*.

        :param req: target request; ``None`` for derived (unbound)
                    contexts such as those returned by ``__call__``
        :param require: action(s) the user must hold to enter the context
                        (default ``PRODUCT_ADMIN``); a single string or
                        an iterable of strings
        :param grant: actions granted unconditionally inside the context
                      (default ``TRAC_ADMIN``)
        :param revoke: actions denied unconditionally inside the context
        :raises ValueError: if an action is both granted and revoked
        """
        grant = frozenset(grant if grant is not None else ('TRAC_ADMIN',))
        revoke = frozenset(revoke or [])
        if grant & revoke:
            raise ValueError('Impossible to grant and revoke (%s)' %
                             ', '.join(sorted(grant & revoke)))
        self.grant = grant
        self.revoke = revoke
        if req:
            # Expand meta-actions eagerly while an environment is at hand.
            self._expand_perms(req.perm.env)
        else:
            self._expanded = False
        self._perm = None
        self.req = req
        self.require_actions = frozenset(('PRODUCT_ADMIN',) if require is None
                                         else ([require]
                                               if isinstance(require, basestring)
                                               else require))
    @property
    def perm(self):
        # Wrapped permission cache; None while the context is inactive.
        return self._perm
    @perm.setter
    def perm(self, perm):
        # Expand grant/revoke meta-actions lazily, the first time a
        # permission cache (and thus an environment) becomes available.
        if perm and not self._expanded:
            self._expand_perms(perm.env)
        self._perm = perm
    def __getattr__(self, attrnm):
        """Delegate PermissionCache attributes to the wrapped cache."""
        # Actually PermissionCache.__slots__ but this will be faster
        if attrnm in ('env', 'username', '_resource', '_cache'):
            try:
                return getattr(self.perm, attrnm)
            except AttributeError:
                pass
        raise AttributeError("'%s' object has no attribute '%s'" %
                             (self.__class__.__name__, attrnm))
    def __enter__(self):
        """Check the required actions, then install this context as
        ``req.perm`` so subsequent checks go through the sudo rules."""
        if self.req is None:
            # e.g. instances returned by __call__
            raise ValueError('Context manager not bound to request object')
        req = self.req
        for action in self.require_actions:
            req.perm.require(action)
        # Stash the original cache; __exit__ swaps it back.
        self.perm = req.perm
        req.perm = self
        return self
def __exit__(self, exc_type, exc_value, tb):
self.req.perm = self.perm
self.perm = None
# Internal methods
    @property
    def is_active(self):
        """Determine whether this context is active
        """
        # Truthy only when bound to a request AND currently installed;
        # note this returns the operands themselves, not a strict bool.
        return self.req and self.perm
def _expand_perms(self, env):
permsys = PermissionSystem(env)
grant = frozenset(permsys.expand_actions(self.grant))
revoke = frozenset(permsys.expand_actions(self.revoke))
# Double check ambiguous action lists
if grant & revoke:
raise ValueError('Impossible to grant and revoke (%s)' %
', '.join(sorted(grant & revoke)))
self.grant = grant
self.revoke = revoke
self._expanded = True
def __assert_require(f):
@wraps(f)
def __require(self, *args, **kwargs):
# FIXME : No check ? Transform into assert statement ?
if not self.perm:
raise RuntimeError('Permission check out of context')
if not self.is_active:
for action in self.require_actions:
self.perm.require(action)
return f(self, *args, **kwargs)
return __require
# PermissionCache methods
@__assert_require
def __call__(self, realm_or_resource, id=False, version=False):
newperm = self.perm(realm_or_resource, id, version)
if newperm is self.perm:
return self
else:
newctx = SudoPermissionContext(None, self.require_actions, self.grant,
self.revoke)
newctx.perm = newperm
return newctx
@__assert_require
def has_permission(self, action, realm_or_resource=None, id=False,
version=False):
return action in self.grant or \
(action not in self.revoke and
self.perm.has_permission(action, realm_or_resource, id,
version))
__contains__ = has_permission
@__assert_require
def require(self, action, realm_or_resource=None, id=False, version=False):
if action in self.grant:
return
if action in self.revoke:
resource = self._normalize_resource(realm_or_resource, id, version)
raise PermissionError(action, resource, self.perm.env)
self.perm.require(action, realm_or_resource, id, version)
assert_permission = require
@__assert_require
def permissions(self):
"""Deprecated (but still used by the HDF compatibility layer)
"""
self.perm.env.log.warning("perm.permissions() is deprecated and "
"is only present for HDF compatibility")
permsys = PermissionSystem(self.perm.env)
actions = permsys.get_user_permissions(self.perm.username)
return [action for action in actions if action in self]
del __assert_require
sudo = SudoPermissionContext
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/bloodhound_multiproduct/multiproduct/env.py | bloodhound_multiproduct/multiproduct/env.py | # -*- coding: UTF-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import os.path
from urlparse import urlsplit
from trac.config import BoolOption, ConfigSection, Option
from trac.core import Component, ComponentManager, ExtensionPoint, implements, \
ComponentMeta
from trac.db.api import TransactionContextManager, QueryContextManager, \
DatabaseManager
from trac.perm import IPermissionRequestor, PermissionSystem
from trac.util import get_pkginfo, lazy
from trac.util.compat import sha1
from trac.util.text import to_unicode, unicode_quote
from trac.versioncontrol import RepositoryManager
from trac.web.href import Href
from multiproduct.api import MultiProductSystem, ISupportMultiProductEnvironment
from multiproduct.cache import lru_cache, default_keymap
from multiproduct.config import Configuration
from multiproduct.dbcursor import BloodhoundConnectionWrapper, BloodhoundIterableCursor, \
ProductEnvContextManager
from multiproduct.model import Product
import trac.env
class ComponentEnvironmentContext(object):
    """Context manager that temporarily re-binds a component to another
    environment.

    On entry the component is activated in the given environment; on exit
    it is re-activated in whatever environment it was bound to before.
    """

    def __init__(self, env, component):
        self._env, self._component = env, component

    def __enter__(self):
        # Remember the environment the component currently belongs to so
        # it can be restored when the context exits.
        self._old_env = self._component.env
        self._env.component_activated(self._component)
        return self

    def __exit__(self, exc_type, exc_value, exc_tb):
        # Re-bind the component to its previous environment; exceptions
        # (if any) are deliberately propagated.
        self._old_env.component_activated(self._component)
class Environment(trac.env.Environment):
    """Bloodhound environment manager

    This class is intended as monkey-patch replacement for
    trac.env.Environment. Required database access methods/properties
    are replaced to provide global view of the database in contrast
    to ProductEnvironment that features per-product view of the database
    (in the context of selected product).

    :param path: the absolute path to the Trac environment
    :param create: if `True`, the environment is created and
                   populated with default data; otherwise, the
                   environment is expected to already exist.
    :param options: A list of `(section, name, value)` tuples that
                    define configuration options
    """

    # Components that declare themselves multi-product aware.
    multi_product_support_components = ExtensionPoint(ISupportMultiProductEnvironment)

    @property
    def global_setup_participants(self):
        """If multi product schema is enabled, return only setup participants
        that implement ISupportMultiProduct. Otherwise, all setup participants
        are considered global.
        """
        if self._multiproduct_schema_enabled:
            all_participants = self.setup_participants
            multiproduct_aware = set(self.multi_product_support_components)
            # MultiProductSystem must run first (priority 0 vs. 10).
            priority = lambda x: 0 if isinstance(x, MultiProductSystem) else 10
            return sorted(
                (c for c in all_participants if c in multiproduct_aware),
                key=priority
            )
        else:
            return self.setup_participants

    @property
    def product_setup_participants(self):
        """If multi product schema is enabled, return setup participants that
        need to be instantiated for each product env. Otherwise, return an
        empty list.
        """
        if self._multiproduct_schema_enabled:
            all_participants = self.setup_participants
            multiproduct_aware = set(self.multi_product_support_components)
            return [
                c for c in all_participants if c not in multiproduct_aware
            ]
        else:
            return []

    def __init__(self, path, create=False, options=[]):
        # global environment w/o parent, set these two before super.__init__
        # as database access can take place within trac.env.Environment
        self.parent = None
        self.product = None
        # `trac.env.Environment.__init__` is not invoked as creation is handled differently
        # from base implementation - different setup participants are invoked when creating
        # global environment.
        ComponentManager.__init__(self)
        self.path = path
        self.systeminfo = []
        self._href = self._abs_href = None
        # Starts disabled; flipped by enable_multiproduct_schema() once the
        # multi-product schema is in place.
        self._multiproduct_schema_enabled = False
        if create:
            self.create(options)
        else:
            self.verify()
            self.setup_config()
        # invoke `IEnvironmentSetupParticipant.environment_created` for all
        # global setup participants
        if create:
            for participant in self.global_setup_participants:
                with ComponentEnvironmentContext(self, participant):
                    participant.environment_created()

    @property
    def db_query(self):
        # Product-scoped access when the multi-product schema is active,
        # otherwise fall through to direct (untranslated) access.
        return ProductEnvContextManager(super(Environment, self).db_query, self) \
            if self._multiproduct_schema_enabled else self.db_direct_query

    @property
    def db_transaction(self):
        return ProductEnvContextManager(super(Environment, self).db_transaction, self) \
            if self._multiproduct_schema_enabled else self.db_direct_transaction

    @property
    def db_direct_query(self):
        # Read-only access bypassing any product scoping.
        return ProductEnvContextManager(super(Environment, self).db_query)

    @property
    def db_direct_transaction(self):
        # Writable access bypassing any product scoping.
        return ProductEnvContextManager(super(Environment, self).db_transaction)

    def all_product_envs(self):
        # One ProductEnvironment per product registered in this environment.
        return [ProductEnvironment(self, product) for product in Product.select(self)]

    def needs_upgrade(self):
        """Return whether the environment needs to be upgraded."""
        def needs_upgrade_in_env(participant, env):
            # Returns True when the participant requires an upgrade in the
            # given environment, None otherwise.
            with ComponentEnvironmentContext(env, participant):
                with env.db_query as db:
                    if participant.environment_needs_upgrade(db):
                        self.log.warn("component %s.%s requires environment upgrade in environment %s...",
                                      participant.__module__, participant.__class__.__name__,
                                      env)
                        return True
        if any(needs_upgrade_in_env(participant, self)
               for participant in self.global_setup_participants):
            return True
        # until schema is multi product aware, product environments can't (and shouldn't) be
        # instantiated
        if self._multiproduct_schema_enabled:
            if any(needs_upgrade_in_env(participant, env)
                   for env in [self] + self.all_product_envs()
                   for participant in self.product_setup_participants):
                return True
        return False

    def upgrade(self, backup=False, backup_dest=None):
        """Upgrade database.

        :param backup: whether or not to backup before upgrading
        :param backup_dest: name of the backup file
        :return: whether the upgrade was performed
        """
        def upgrader_for_env(participant, env):
            # Returns (env, participant) when an upgrade is needed there,
            # None otherwise.
            with ComponentEnvironmentContext(env, participant):
                with env.db_query as db:
                    if participant.environment_needs_upgrade(db):
                        self.log.info(
                            "%s.%s needs upgrade in environment %s...",
                            participant.__module__,
                            participant.__class__.__name__,
                            env)
                        return env, participant

        def upgraders_for_product_envs():
            upgraders = (upgrader_for_env(participant, env)
                         for participant in self.product_setup_participants
                         for env in [self] + self.all_product_envs())
            return [u for u in upgraders if u]

        # first enumerate components that are multi product aware and
        # require upgrade in global environment
        global_upgraders = [upgrader_for_env(participant, self)
                            for participant in self.global_setup_participants]
        global_upgraders = [u for u in global_upgraders if u]
        product_upgraders = None
        if not global_upgraders and self._multiproduct_schema_enabled:
            # if no upgrades required in global environment, enumerate
            # required upgrades for product environments
            product_upgraders = upgraders_for_product_envs()
        if not global_upgraders + (product_upgraders or []):
            # Nothing to do anywhere.
            return False
        if backup:
            try:
                self.backup(backup_dest)
            except Exception, e:
                raise trac.env.BackupError(e)

        def execute_upgrades(upgraders_list):
            for env, participant in upgraders_list:
                self.log.info("%s.%s upgrading in environment %s...",
                              participant.__module__,
                              participant.__class__.__name__,
                              env)
                with ComponentEnvironmentContext(env, participant):
                    with env.db_transaction as db:
                        participant.upgrade_environment(db)
                # Database schema may have changed, so close all connections
                DatabaseManager(env).shutdown()

        # execute global upgrades first, product environment upgrades next
        execute_upgrades(global_upgraders)
        if product_upgraders is None and self._multiproduct_schema_enabled:
            # Global upgrades may have enabled the multi-product schema:
            # re-enumerate product upgraders now.
            product_upgraders = upgraders_for_product_envs()
        if product_upgraders:
            execute_upgrades(product_upgraders)
        return True

    def get_version(self, db=None, initial=False):
        """Return the current version of the database. If the
        optional argument `initial` is set to `True`, the version of
        the database used at the time of creation will be returned.
        """
        rows = self.db_direct_query("""
            SELECT value FROM system WHERE name='%sdatabase_version'
            """ % ('initial_' if initial else ''))
        return (rows and int(rows[0][0])) or 0

    def enable_multiproduct_schema(self, enable=True):
        # Toggle product-scoped SQL translation; the cursor cache must be
        # cleared so previously translated statements are not reused.
        self._multiproduct_schema_enabled = enable
        BloodhoundIterableCursor.cache_reset()
# Monkey patch: replace trac.env.Environment with the multi-product aware
# Environment defined above, so all Trac code picks up the replacement.
trac.env.Environment = Environment
# this must follow the monkey patch (trac.env.Environment) above, otherwise
# trac.test.EnvironmentStub will not be correct as the class will derive from
# not replaced trac.env.Environment
import trac.test
class EnvironmentStub(trac.test.EnvironmentStub):
    """Bloodhound test environment stub

    This class replaces trac.test.EnvironmentStub and extends it with parent
    and product properties (same case as with the Environment).
    """
    def __init__(self, default_data=False, enable=None, disable=None,
                 path=None, destroying=False):
        self.parent = None
        self.product = None
        self._multiproduct_schema_enabled = False
        # Base initializer is always called with default_data=False;
        # population with default data is deferred to reset_db() below so
        # it runs through the multiproduct-aware override.
        super(EnvironmentStub, self).__init__(default_data=False,
                                              enable=enable, disable=disable,
                                              path=path, destroying=destroying)
        if default_data:
            self.reset_db(default_data)

    @staticmethod
    def enable_component_in_config(env, cls):
        """Keep track of enabled state in configuration as well
        during test runs. This is closer to reality than
        inherited `enable_component` method.
        """
        env.config['components'].set(env._component_name(cls), 'enabled')
        # Invalidate cached component state so the new setting is picked up.
        env.enabled.clear()
        env.components.pop(cls, None)
        try:
            del env._rules
        except AttributeError:
            pass
        # FIXME: Shall we ?
        #env.config.save()

    @staticmethod
    def disable_component_in_config(env, component):
        """Keep track of disabled state in configuration as well
        during test runs. This is closer to reality than
        inherited `disable_component` method.
        """
        # Accept either a component class or an instance.
        if isinstance(component, type):
            cls = component
        else:
            cls = component.__class__
        env.config['components'].set(env._component_name(cls), 'disabled')
        # Invalidate cached component state so the new setting is picked up.
        env.enabled.clear()
        env.components.pop(cls, None)
        try:
            del env._rules
        except AttributeError:
            pass
        env.config.save()

    def reset_db(self, default_data=None):
        # Temporarily disable product-scoped SQL translation so the base
        # class reset operates on the raw tables; the previous flag value
        # is always restored, even on failure.
        multiproduct_schema = self._multiproduct_schema_enabled
        self._multiproduct_schema_enabled = False
        try:
            super(EnvironmentStub, self).reset_db(default_data=default_data)
        finally:
            self._multiproduct_schema_enabled = multiproduct_schema
# Monkey patch: replace trac.test.EnvironmentStub with the multi-product
# aware stub defined above (trac.test was imported after patching
# trac.env.Environment so the stub derives from the replacement class).
trac.test.EnvironmentStub = EnvironmentStub
class ProductEnvironment(Component, ComponentManager):
"""Bloodhound product-aware environment manager.
Bloodhound encapsulates access to product resources stored inside a
Trac environment via product environments. They are compatible lightweight
irepresentations of top level environment.
Product environments contain among other things:
* configuration key-value pairs stored in the database,
* product-aware clones of the wiki and ticket attachments files,
Product environments do not have:
* product-specific templates and plugins,
* a separate database
* active participation in database upgrades and other setup tasks
See https://issues.apache.org/bloodhound/wiki/Proposals/BEP-0003
"""
class __metaclass__(ComponentMeta):
def select_global_env(f):
"""Replaces env with env.parent where appropriate"""
# Keep the signature of __call__ method
def __call__(self, env, *args, **kwargs):
g_env = env.parent if isinstance(env,
ProductEnvironment) else env
return f(self, g_env, *args, **kwargs)
__call__.clear = f.clear
return __call__
def product_env_keymap(args, kwds, kwd_mark):
# Remove meta-reference to self (i.e. product env class)
args = args[1:]
try:
product = kwds['product']
except KeyError:
# Product provided as positional argument
if isinstance(args[1], Product):
args = (args[0], args[1].prefix) + args[2:]
else:
# Product supplied as keyword argument
if isinstance(product, Product):
kwds['product'] = product.prefix
return default_keymap(args, kwds, kwd_mark)
@select_global_env
@lru_cache(maxsize=100, keymap=product_env_keymap)
def __call__(self, *args, **kwargs):
"""Return an existing instance if there is a hit
in the global LRU cache, otherwise create a new instance.
"""
return ComponentMeta.__call__(self, *args, **kwargs)
del product_env_keymap, select_global_env
implements(trac.env.ISystemInfoProvider, IPermissionRequestor)
setup_participants = ExtensionPoint(trac.env.IEnvironmentSetupParticipant)
multi_product_support_components = ExtensionPoint(ISupportMultiProductEnvironment)
@classmethod
def clear_env_cache(cls):
cls.__metaclass__.__call__.clear()
@property
def product_setup_participants(self):
return [
component for component in self.setup_participants
if component not in self.multi_product_support_components
]
components_section = ConfigSection('components',
"""This section is used to enable or disable components
provided by plugins, as well as by Trac itself.
See also: TracIni , TracPlugins
""")
@property
def shared_plugins_dir():
"""Product environments may not add plugins.
"""
return ''
_base_url = Option('trac', 'base_url', '',
"""Reference URL for the Trac deployment.
This is the base URL that will be used when producing
documents that will be used outside of the web browsing
context, like for example when inserting URLs pointing to Trac
resources in notification e-mails.""", doc_domain='multiproduct')
@property
def base_url(self):
base_url = self._base_url
if base_url == self.parent.base_url:
return ''
return base_url
_base_url_for_redirect = BoolOption('trac', 'use_base_url_for_redirect',
False,
"""Optionally use `[trac] base_url` for redirects.
In some configurations, usually involving running Trac behind
a HTTP proxy, Trac can't automatically reconstruct the URL
that is used to access it. You may need to use this option to
force Trac to use the `base_url` setting also for
redirects. This introduces the obvious limitation that this
environment will only be usable when accessible from that URL,
as redirects are frequently used. ''(since 0.10.5)''""",
doc_domain='multiproduct')
@property
def project_name(self):
"""Name of the product.
"""
return self.product.name
@property
def project_description(self):
"""Short description of the product.
"""
return self.product.description
@property
def project_url(self):
"""URL of the main project web site, usually the website in
which the `base_url` resides. This is used in notification
e-mails.
"""
# FIXME: Should products have different values i.e. config option ?
return self.parent.project_url
project_admin = Option('project', 'admin', '',
"""E-Mail address of the product's leader / administrator.""",
doc_domain='multiproduct')
@property
def project_footer(self):
"""Page footer text (right-aligned).
"""
# FIXME: Should products have different values i.e. config option ?
return self.parent.project_footer
project_icon = Option('project', 'icon', 'common/trac.ico',
"""URL of the icon of the product.""", doc_domain='multiproduct')
log_type = Option('logging', 'log_type', 'inherit',
"""Logging facility to use.
Should be one of (`inherit`, `none`, `file`, `stderr`,
`syslog`, `winlog`).""", doc_domain='multiproduct')
log_file = Option('logging', 'log_file', 'trac.log',
"""If `log_type` is `file`, this should be a path to the
log-file. Relative paths are resolved relative to the `log`
directory of the environment.""", doc_domain='multiproduct')
log_level = Option('logging', 'log_level', 'DEBUG',
"""Level of verbosity in log.
Should be one of (`CRITICAL`, `ERROR`, `WARN`, `INFO`, `DEBUG`).""",
doc_domain='multiproduct')
log_format = Option('logging', 'log_format', None,
"""Custom logging format.
If nothing is set, the following will be used:
Trac[$(module)s] $(levelname)s: $(message)s
In addition to regular key names supported by the Python
logger library (see
http://docs.python.org/library/logging.html), one could use:
- $(path)s the path for the current environment
- $(basename)s the last path component of the current environment
- $(project)s the project name
Note the usage of `$(...)s` instead of `%(...)s` as the latter form
would be interpreted by the ConfigParser itself.
Example:
`($(thread)d) Trac[$(basename)s:$(module)s] $(levelname)s: $(message)s`
''(since 0.10.5)''""", doc_domain='multiproduct')
def __init__(self, env, product, create=False):
"""Initialize the product environment.
:param env: the global Trac environment
:param product: product prefix or an instance of
multiproduct.model.Product
"""
if not isinstance(env, trac.env.Environment):
cls = self.__class__
raise TypeError("Initializer must be called with " \
"trac.env.Environment instance as first argument " \
"(got %s instance instead)" %
(cls.__module__ + '.' + cls.__name__, ))
ComponentManager.__init__(self)
if isinstance(product, Product):
if product._env is not env:
raise ValueError("Product's environment mismatch")
elif isinstance(product, basestring):
products = Product.select(env, where={'prefix': product})
if len(products) == 1 :
product = products[0]
else:
env.log.debug("Products for '%s' : %s",
product, products)
raise LookupError("Missing product %s" % (product,))
self.parent = env
self.product = product
self.systeminfo = []
self.setup_config()
# when creating product environment, invoke `IEnvironmentSetupParticipant.environment_created`
# for all setup participants that don't support multi product environments
if create:
for participant in self.product_setup_participants:
with ComponentEnvironmentContext(self, participant):
participant.environment_created()
def __getitem__(self, cls):
if issubclass(cls, trac.env.Environment):
return self.parent
elif cls is self.__class__:
return self
else:
return ComponentManager.__getitem__(self, cls)
def __getattr__(self, attrnm):
"""Forward attribute access request to parent environment.
Initially this will affect the following members of
`trac.env.Environment` class:
system_info_providers, secure_cookies, project_admin_trac_url,
get_system_info, get_version, get_templates_dir, get_templates_dir,
get_log_dir, backup
"""
try:
if attrnm in ('parent', '_rules'):
raise AttributeError
return getattr(self.parent, attrnm)
except AttributeError:
raise AttributeError("'%s' object has no attribute '%s'" %
(self.__class__.__name__, attrnm))
def __repr__(self):
return "<%s %s at %s>" % (self.__class__.__name__,
repr(self.product.prefix),
hex(id(self)))
@lazy
def path(self):
"""The subfolder `./products/<product prefix>` relative to the
top-level directory of the global environment will be the root of
product file system area.
"""
folder = os.path.join(self.parent.path, 'products', self.product.prefix)
if not os.path.exists(folder):
os.makedirs(folder)
return folder
# IPermissionRequestor methods
def get_permission_actions(self):
"""Implement the product-specific `PRODUCT_ADMIN` meta permission.
"""
actions = set()
permsys = PermissionSystem(self)
for requestor in permsys.requestors:
if requestor is not self and requestor is not permsys:
for action in requestor.get_permission_actions() or []:
if isinstance(action, tuple):
actions.add(action[0])
else:
actions.add(action)
# PermissionSystem's method was not invoked
actions.add('EMAIL_VIEW')
# FIXME: should not be needed, JIC better double check
actions.discard('TRAC_ADMIN')
return [('PRODUCT_ADMIN', list(actions))]
# ISystemInfoProvider methods
# Same as parent environment's . Avoid duplicated code
component_activated = trac.env.Environment.component_activated.im_func
_component_name = trac.env.Environment._component_name.im_func
_component_rules = trac.env.Environment._component_rules
enable_component = trac.env.Environment.enable_component.im_func
get_known_users = trac.env.Environment.get_known_users.im_func
get_repository = trac.env.Environment.get_repository.im_func
is_component_enabled_local = trac.env.Environment.is_component_enabled.im_func
def is_enabled(self, cls):
"""Return whether the given component class is enabled."""
modtime = max(self.config.get_lock_file_mtime(),
self.config._lastmtime)
if modtime > self._config_mtime:
self.enabled.clear()
try:
del self._rules
except AttributeError:
pass
# FIXME : Improve cache hits by tracking global env last mtime
self.parent.enabled.clear()
try:
del self.parent._rules
except AttributeError:
pass
self._config_mtime = modtime
return super(ProductEnvironment, self).is_enabled(cls)
def is_component_enabled(self, cls):
"""Implemented to only allow activation of components already
activated in the global environment that are in turn not disabled in
the configuration.
This is called by the `ComponentManager` base class when a
component is about to be activated. If this method returns
`False`, the component does not get activated. If it returns
`None`, the component only gets activated if it is located in
the `plugins` directory of the environment.
"""
if cls is self.__class__:
# Prevent lookups in parent env ... will always fail
return True
# FIXME : Maybe checking for ComponentManager is too drastic
elif issubclass(cls, ComponentManager):
# Avoid clashes with overridden Environment's options
return False
elif self.parent[cls] is None:
return False
return self.is_component_enabled_local(cls)
def get_db_cnx(self):
"""Return a database connection from the connection pool
:deprecated: Use :meth:`db_transaction` or :meth:`db_query` instead
`db_transaction` for obtaining the `db` database connection
which can be used for performing any query
(SELECT/INSERT/UPDATE/DELETE)::
with env.db_transaction as db:
...
`db_query` for obtaining a `db` database connection which can
be used for performing SELECT queries only::
with env.db_query as db:
...
"""
return BloodhoundConnectionWrapper(self.parent.get_db_cnx(), self)
@property
def db_exc(self):
"""Return an object (typically a module) containing all the
backend-specific exception types as attributes, named
according to the Python Database API
(http://www.python.org/dev/peps/pep-0249/).
To catch a database exception, use the following pattern::
try:
with env.db_transaction as db:
...
except env.db_exc.IntegrityError, e:
...
"""
# exception types same as in global environment
return self.parent.db_exc
def with_transaction(self, db=None):
"""Decorator for transaction functions :deprecated:
"""
raise NotImplementedError('Deprecated method')
def get_read_db(self):
"""Return a database connection for read purposes :deprecated:
See `trac.db.api.get_read_db` for detailed documentation.
"""
return BloodhoundConnectionWrapper(self.parent.get_read_db(), self)
@property
def db_query(self):
"""Return a context manager which can be used to obtain a
read-only database connection.
Example::
with env.db_query as db:
cursor = db.cursor()
cursor.execute("SELECT ...")
for row in cursor.fetchall():
...
Note that a connection retrieved this way can be "called"
directly in order to execute a query::
with env.db_query as db:
for row in db("SELECT ..."):
...
If you don't need to manipulate the connection itself, this
can even be simplified to::
for row in env.db_query("SELECT ..."):
...
:warning: after a `with env.db_query as db` block, though the
`db` variable is still available, you shouldn't use it as it
might have been closed when exiting the context, if this
context was the outermost context (`db_query` or
`db_transaction`).
"""
return ProductEnvContextManager(QueryContextManager(self.parent), self)
@property
def db_transaction(self):
"""Return a context manager which can be used to obtain a
writable database connection.
Example::
with env.db_transaction as db:
cursor = db.cursor()
cursor.execute("UPDATE ...")
Upon successful exit of the context, the context manager will
commit the transaction. In case of nested contexts, only the
outermost context performs a commit. However, should an
exception happen, any context manager will perform a rollback.
Like for its read-only counterpart, you can directly execute a
DML query on the `db`::
with env.db_transaction as db:
db("UPDATE ...")
If you don't need to manipulate the connection itself, this
can also be simplified to::
env.db_transaction("UPDATE ...")
:warning: after a `with env.db_transaction` as db` block,
though the `db` variable is still available, you shouldn't
use it as it might have been closed when exiting the
context, if this context was the outermost context
(`db_query` or `db_transaction`).
"""
return ProductEnvContextManager(TransactionContextManager(self.parent), self)
def shutdown(self, tid=None):
"""Close the environment."""
RepositoryManager(self).shutdown(tid)
# FIXME: Shared DB so IMO this should not happen ... at least not here
#DatabaseManager(self).shutdown(tid)
if tid is None:
self.log.removeHandler(self._log_handler)
self._log_handler.flush()
self._log_handler.close()
del self._log_handler
def create(self, options=[]):
"""Placeholder for compatibility when trying to create the basic
directory structure of the environment, etc ...
This method does nothing at all.
"""
# TODO: Handle options args
def setup_config(self):
"""Load the configuration object.
"""
import trac.config
parent_path = MultiProductSystem(self.parent).product_config_parent
if parent_path and os.path.isfile(parent_path):
parents = [trac.config.Configuration(parent_path)]
else:
parents = [self.parent.config]
self.config = Configuration(self.parent, self.product.prefix, parents)
self._config_mtime = 0
self.setup_log()
def setup_log(self):
"""Initialize the logging sub-system."""
from trac.log import logger_handler_factory
logtype = self.log_type
logfile = self.log_file
format = self.log_format
self.parent.log.debug("Log type '%s' for product '%s'",
logtype, self.product.prefix)
# Force logger inheritance on identical configuration
if (logtype, logfile, format) == (self.parent.log_type,
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | true |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/bloodhound_multiproduct/multiproduct/util/translation.py | bloodhound_multiproduct/multiproduct/util/translation.py | # -*- coding: UTF-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
r"""Project dashboard for Apache(TM) Bloodhound
Translation functions and classes.
"""
from trac.util.translation import domain_functions
#------------------------------------------------------
# Internationalization
#------------------------------------------------------

# Bind the translation helpers to the 'multiproduct' message catalog so
# strings in this package are looked up in its own domain rather than
# Trac's default one.
_, ngettext, tag_, tagn_, gettext, N_, add_domain = \
    domain_functions('multiproduct', ('_', 'ngettext', 'tag_', 'tagn_',
                                      'gettext', 'N_', 'add_domain'))
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/bloodhound_multiproduct/multiproduct/util/__init__.py | bloodhound_multiproduct/multiproduct/util/__init__.py | # -*- coding: UTF-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from genshi.builder import tag
import trac.db.api
from trac.util.text import unquote_label
from trac.wiki.formatter import LinkFormatter
from trac.core import Component, ComponentMeta
#----------------------------
# Component replacement base
#----------------------------
class ReplacementComponentMeta(ComponentMeta):
    """Component replacement meta class

    For a replacement component declared as
    ``class Foo(ReplacementComponent, OriginalFoo)`` this metaclass
    un-registers ``OriginalFoo`` (the replaced trac component) from the
    global component registries before registering the replacement.
    """
    def __new__(mcs, name, bases, d):
        # The ReplacementComponent base class itself derives directly from
        # Component and needs no un-registration; only its subclasses do.
        if bases[0] != Component:
            # Drop the ReplacementComponent marker base; the replaced trac
            # component is the next base.
            bases = bases[1:]
            base_class = bases[0]
            # undo what has been done by ComponentMeta.__new___ for the
            # replacement component base class
            # remove implemented interfaces from registry for the base class
            for itf in base_class._implements:
                ComponentMeta._registry[itf] = filter(lambda c: c != base_class,
                                                      ComponentMeta._registry[itf])
            # remove base class from components list
            ComponentMeta._components = filter(lambda c: c != base_class,
                                               ComponentMeta._components)
            base_class._implements = []
            base_class.abstract = True
        # Register the replacement class through the regular machinery.
        return ComponentMeta.__new__(mcs, name, bases, d)
class ReplacementComponent(Component):
    """Base class for components that replace existing trac
    implementations"""
    # Python 2 metaclass hook; the metaclass above unregisters the
    # replaced trac component when a subclass of this class is created.
    __metaclass__ = ReplacementComponentMeta
#--------------------------
# Custom wiki formatters
#--------------------------
class EmbeddedLinkFormatter(LinkFormatter):
    """Format the inner TracLinks expression corresponding to resources
    in compound links e.g. product:PREFIX:ticket:1 , global:ticket:1
    """
    def __init__(self, env, context, parent_match=None):
        """Extend initializer signature to accept parent match.

        :param parent_match: mapping object containing the following keys
            - ns : namespace of parent resolver
            - target : target supplied in to parent resolver
            - label : label supplied in to parent resolver
            - fullmatch : parent regex match (optional)
        """
        super(EmbeddedLinkFormatter, self).__init__(env, context)
        self.parent_match = parent_match
        # When set, `match` wraps the target part of `ns:target` in quotes.
        self.auto_quote = False

    def match(self, wikitext):
        # Default to the raw input; only rewrite it when auto-quoting
        # applies.  Previously `_wikitext` was left unassigned when
        # `auto_quote` was set but `wikitext` contained no ':' separator,
        # raising UnboundLocalError below.
        _wikitext = wikitext
        if self.auto_quote:
            parts = tuple(wikitext.split(':', 1))
            if len(parts) == 2:
                if parts[1]:
                    # ns:target -> ns:"target"
                    _wikitext = '%s:"%s"' % parts
                else:
                    # Trailing colon with empty target -> bracketed form
                    _wikitext = '[%s:]' % parts[:1]
        return super(EmbeddedLinkFormatter, self).match(_wikitext)

    @staticmethod
    def enhance_link(link):
        # Hook for subclasses to post-process the generated link element;
        # the default implementation returns it unchanged.
        return link

    def handle_match(self, fullmatch):
        if self.parent_match is None:
            return super(EmbeddedLinkFormatter, self).handle_match(fullmatch)
        for itype, match in fullmatch.groupdict().items():
            if match and not itype in self.wikiparser.helper_patterns:
                # Check for preceding escape character '!'
                if match[0] == '!':
                    # Erroneous expression. Nested link would be escaped
                    return tag.a(self.parent_match['label'], class_='missing')
                if itype in self.wikiparser.external_handlers:
                    #TODO: Important! Add product prefix in label (when needed?)
                    external_handler = self.wikiparser.external_handlers[itype]
                    link = external_handler(self, match, fullmatch)
                else:
                    internal_handler = getattr(self, '_%s_formatter' % itype)
                    link = internal_handler(match, fullmatch)
                return self.enhance_link(link)

    # Overridden formatter methods
    # TODO : Override more if necessary
    def _shref_formatter(self, match, fullmatch):
        if self.parent_match is None:
            return super(EmbeddedLinkFormatter, self)._shref_formatter(
                match, fullmatch)
        ns = fullmatch.group('sns')
        target = unquote_label(fullmatch.group('stgt'))
        # Prefer the label supplied to the parent resolver unless it was
        # just the raw target, in which case show the inner target.
        label = (self.parent_match['label']
                 if self.parent_match['label'] != self.parent_match['target']
                 else target)
        return self._make_link(ns, target, match, label, fullmatch)

    def _lhref_formatter(self, match, fullmatch):
        if self.parent_match is None:
            return super(EmbeddedLinkFormatter, self)._lhref_formatter(
                match, fullmatch)
        rel = fullmatch.group('rel')
        ns = fullmatch.group('lns')
        target = unquote_label(fullmatch.group('ltgt'))
        label = (self.parent_match['label']
                 if self.parent_match['label'] != self.parent_match['target']
                 else fullmatch.group('label'))
        return self._make_lhref_link(match, fullmatch, rel, ns, target, label)
def get_db_connector_from_uri(env):
    """Return the connector scheme (e.g. 'sqlite', 'mysql', 'postgres')
    of the environment's configured database connection URI."""
    # _parse_db_str returns (scheme, args); only the scheme is needed here,
    # so the unused second element is discarded.
    connector, _ = trac.db.api._parse_db_str(
        trac.db.api.DatabaseManager(env).connection_uri
    )
    return connector
def using_sqlite_backend(env):
    """Return True iff `env`'s database connection URI uses SQLite."""
    backend = get_db_connector_from_uri(env)
    return backend == 'sqlite'
def using_mysql_backend(env):
    """Return True iff `env`'s database connection URI uses MySQL."""
    backend = get_db_connector_from_uri(env)
    return backend == 'mysql'
#----------------------
# Useful regex
#----------------------
# One or more word characters not starting with a digit (the shape of a
# Python identifier); used e.g. for product prefixes in report link syntax.
IDENTIFIER = r'(?!\d)\w+'
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/bloodhound_multiproduct/multiproduct/ticket/web_ui.py | bloodhound_multiproduct/multiproduct/ticket/web_ui.py | # -*- coding: UTF-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import re
from genshi.builder import tag
from trac.core import TracError
from trac.ticket.model import Ticket
from trac.ticket.web_ui import TicketModule
from trac.ticket.report import ReportModule
from trac.attachment import AttachmentModule
from trac.ticket.api import TicketSystem
from trac.resource import Resource, get_resource_shortname, ResourceNotFound
from trac.search import search_to_sql, shorten_result
from trac.util.datefmt import from_utimestamp
from multiproduct.api import MultiProductSystem, PRODUCT_SYNTAX_DELIMITER_RE
from multiproduct.env import ProductEnvironment
from multiproduct.model import Product
from multiproduct.util import IDENTIFIER
from multiproduct.util.translation import _, tag_
from multiproduct.web_ui import ProductModule
class ProductTicketModule(TicketModule):
    """Product Overrides for the TicketModule.

    Dispatches ticket requests either inside a product environment or,
    in global scope, redirects them into the owning product's scope.
    """

    # IRequestHandler methods

    #def match_request(self, req):
    # override not yet required

    def process_request(self, req):
        """Override for TicketModule process_request.

        Handles three cases: new-ticket requests (redirected into a
        product scope when issued globally), ticket requests inside a
        product environment, and global requests where the id is a UID
        that is redirected to the owning product.
        """
        ticketid = req.args.get('id')
        productid = req.args.get('productid', '')

        if not ticketid:
            # if /newticket is executed in global scope (from QCT), redirect
            # the request to /products/<first_product_in_DB>/newticket
            if not productid and \
                    not isinstance(self.env, ProductEnvironment):
                default_product = self.env.config.get('ticket',
                                                      'default_product')
                products = Product.select(self.env, {'fields': ['prefix']})
                prefixes = [prod.prefix for prod in products]
                if not default_product or default_product not in prefixes:
                    # NOTE(review): assumes at least one product exists
                    default_product = products[0].prefix
                req.redirect(req.href.products(default_product, 'newticket'))
            return self._process_newticket_request(req)

        if req.path_info in ('/newticket', '/products'):
            raise TracError(_("id can't be set for a new ticket request."))

        if isinstance(self.env, ProductEnvironment):
            ticket = Ticket(self.env, ticketid)
            if productid and ticket['product'] != productid:
                msg = "Ticket %(id)s in product '%(prod)s' does not exist."
                raise ResourceNotFound(_(msg, id=ticketid, prod=productid),
                                       _("Invalid ticket number"))
            return self._process_ticket_request(req)

        # executed in global scope -> assume ticketid=UID, redirect to product
        with self.env.db_direct_query as db:
            rows = db("""SELECT id,product FROM ticket WHERE uid=%s""",
                      (ticketid,))
            if not rows:
                msg = "Ticket with uid %(uid)s does not exist."
                raise ResourceNotFound(_(msg, uid=ticketid),
                                       _("Invalid ticket number"))
            tid, prefix = rows[0]
            req.redirect(req.href.products(prefix, 'ticket', tid))

    # INavigationContributor methods

    #def get_active_navigation_item(self, req):
    # override not yet required

    def get_navigation_items(self, req):
        """Overriding TicketModules New Ticket nav item"""
        return

    # ISearchSource methods

    #def get_search_filters(self, req):
    # override not yet required

    def get_search_results(self, req, terms, filters):
        """Overriding search results for Tickets.

        Yields (href, title, date, author, excerpt) tuples for matching
        tickets, optionally narrowed to a single product, followed by any
        matching ticket attachments.
        """
        if not 'ticket' in filters:
            return
        ticket_realm = Resource('ticket')
        with self.env.db_query as db:
            sql, args = search_to_sql(db, ['summary', 'keywords',
                                           'description', 'reporter', 'cc',
                                           db.cast('id', 'text')], terms)
            sql2, args2 = search_to_sql(db, ['newvalue'], terms)
            sql3, args3 = search_to_sql(db, ['value'], terms)
            ticketsystem = TicketSystem(self.env)
            # Bind the (user-supplied) product filter as a query parameter
            # instead of interpolating it into the SQL text, so a crafted
            # 'product' request argument cannot inject SQL.
            product = req.args.get('product')
            if product:
                productsql = "product=%s AND"
                product_args = [product]
            else:
                productsql = ""
                product_args = []

            for summary, desc, author, type, tid, ts, status, resolution in \
                    db("""SELECT summary, description, reporter, type, id,
                                 time, status, resolution
                          FROM ticket
                          WHERE (%s id IN (
                              SELECT id FROM ticket WHERE %s
                              UNION
                              SELECT ticket FROM ticket_change
                              WHERE field='comment' AND %s
                              UNION
                              SELECT ticket FROM ticket_custom WHERE %s
                          ))
                          """ % (productsql, sql, sql2, sql3),
                          product_args + args + args2 + args3):
                t = ticket_realm(id=tid)
                if 'TICKET_VIEW' in req.perm(t):
                    yield (req.href.ticket(tid),
                           tag_("%(title)s: %(message)s",
                                title=tag.span(
                                    get_resource_shortname(self.env, t),
                                    class_=status),
                                message=ticketsystem.format_summary(
                                    summary, status, resolution, type)),
                           from_utimestamp(ts), author,
                           shorten_result(desc, terms))

        # Attachments
        for result in AttachmentModule(self.env) \
                      .get_search_results(req, ticket_realm, terms):
            yield result
class ProductReportModule(ReportModule):
    """Multiproduct replacement for ReportModule"""

    # INavigationContributor methods

    #def get_active_navigation_item(self, req):
    # not yet required

    def get_navigation_items(self, req):
        """Yield the 'View Tickets' main navigation entry, pointing at the
        report list of the current product scope."""
        if 'REPORT_VIEW' not in req.perm:
            return
        report_href = ProductModule.get_product_path(self.env, req, 'report')
        yield ('mainnav', 'tickets',
               tag.a(_('View Tickets'), href=report_href))

    # IWikiSyntaxProvider methods

    #def get_link_resolvers(self):
    # not yet required

    def get_wiki_syntax(self):
        """Extend the inherited {N} report syntax with product-scoped
        variants."""
        # FIXME: yield from
        for syntax in super(ProductReportModule, self).get_wiki_syntax():
            yield syntax
        # Previously unmatched prefix
        yield (r"!?\{(?P<prp>%s(?:\s+|(?:%s)))[0-9]+\}"
               % (IDENTIFIER, PRODUCT_SYNTAX_DELIMITER_RE),
               lambda x, y, z: self._format_link(x, 'report', y[1:-1], y, z))
        # Absolute product report syntax
        yield (r"!?\{(?P<prns>global:|product:%s(?:\s+|:))"
               r"(?P<prid>[0-9]+)\}" % (IDENTIFIER,),
               lambda x, y, z: self._format_mplink(x, 'report', y[1:-1], y, z))

    def _format_link(self, formatter, ns, target, label, fullmatch=None):
        """Render a {N} report link, resolving InterTrac and product
        prefixes before falling back to a plain report link."""
        resolved = formatter.shorthand_intertrac_helper(ns, target, label,
                                                        fullmatch)
        if resolved:
            return resolved
        # second chance to match InterTrac prefix as product prefix
        if fullmatch.group('it_' + ns) or fullmatch.group('prp'):
            return self._format_mplink(formatter, ns, target, label,
                                       fullmatch)
        report, query_args, fragment = formatter.split_link(target)
        return tag.a(label, href=formatter.href.report(report) + query_args,
                     class_='report')

    def _format_mplink(self, formatter, ns, target, label, fullmatch=None):
        """Render a product-scoped report link ({global:N},
        {product:PREFIX:N}, {PREFIX N}); delegates to MultiProductSystem."""
        mpsys = self.env[MultiProductSystem]
        if mpsys is None:
            return tag.a(label, class_='missing product')
        substeps = []
        prns = fullmatch.group('prns')
        if not prns:
            # Forwarded from _format_link, inherit current context
            product_id = fullmatch.group('it_' + ns) or \
                         fullmatch.group('prp')
            if product_id:
                product_ns = 'product'
                substeps = [product_id.strip()]
            elif isinstance(self.env, ProductEnvironment):
                product_ns = 'product'
                substeps = [self.env.product.prefix]
            else:
                product_ns = 'global'
        elif prns == 'global:':
            product_ns = 'global'
        elif prns.startswith('product:'):
            product_ns, product_id = prns.strip().split(':')[:2]
            substeps = [product_id]
        # Trailing digits of the target are the report number when the
        # dedicated 'prid' group did not match.
        report_id = fullmatch.group('prid') or \
                    re.match(r'^.*?(\d+)$', target).group(1)
        substeps += [ns, report_id]
        return mpsys._format_link(formatter, product_ns,
                                  u':'.join(substeps), label, fullmatch)
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/bloodhound_multiproduct/multiproduct/ticket/query.py | bloodhound_multiproduct/multiproduct/ticket/query.py | # -*- coding: UTF-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import with_statement
import re
from itertools import groupby
from math import ceil
from datetime import datetime, timedelta
from genshi.builder import tag
from trac.core import TracError
from trac.db import get_column_names
from trac.mimeview.api import Mimeview
from trac.ticket.api import TicketSystem
from trac.ticket.query import Query, QueryModule, TicketQueryMacro, QueryValueError
from trac.util.datefmt import from_utimestamp, utc, to_timestamp
from trac.util.text import shorten_line
from trac.web import parse_arg_list, arg_list_to_args
from trac.web.chrome import Chrome, add_stylesheet, add_link, web_context, \
add_script_data, add_script, add_ctxtnav, add_warning
from trac.resource import Resource
from multiproduct.dbcursor import GLOBAL_PRODUCT
from multiproduct.env import lookup_product_env, resolve_product_href, \
ProductEnvironment
from multiproduct.util.translation import _, tag_
class ProductQuery(Query):
    """Product Overrides for TracQuery.

    This class allows for writing TracQuery expressions matching resources
    beyond product boundaries.
    """

    def _count(self, sql, args):
        # In a product environment delegate to the scoped implementation;
        # globally, count over a direct (cross-product) connection.
        if isinstance(self.env, ProductEnvironment):
            return super(ProductQuery, self)._count(sql, args)
        cnt = self.env.db_direct_query("SELECT COUNT(*) FROM (%s) AS x"
                                       % sql, args)[0][0]
        # "AS x" is needed for MySQL ("Subqueries in the FROM Clause")
        self.env.log.debug("Count results in Query: %d", cnt)
        return cnt

    def get_columns(self):
        super(ProductQuery, self).get_columns()
        if not 'product' in self.cols and self.group != 'product':
            # make sure 'product' is always present
            # (needed for product context, href, permission checks ...)
            # but don't implicitly include it if items are grouped by product
            self.cols.insert(0, 'product')
        return self.cols

    def _get_ticket_href(self, prefix, tid):
        """Href for ticket `tid` in product `prefix`; a marker fragment is
        returned when the product prefix cannot be resolved."""
        try:
            env = lookup_product_env(self.env, prefix)
        except LookupError:
            return '#invalid-product-' + prefix
        else:
            href = resolve_product_href(env, self.env)
            return href.ticket(tid)

    def get_href(self, href, id=None, order=None, desc=None, format=None,
                 max=None, page=None):
        # Route generated links through the product-aware href wrapper.
        from multiproduct.hooks import ProductizedHref
        return super(ProductQuery, self).get_href(
            ProductizedHref(href, self.env.href.base), id, order, desc,
            format, max, page)

    def execute(self, req=None, db=None, cached_ids=None, authname=None,
                tzinfo=None, href=None, locale=None):
        """Retrieve the list of matching tickets.

        :since 1.0: the `db` parameter is no longer needed and will be removed
        in version 1.1.1
        """
        with self.env.db_direct_query as db:
            cursor = db.cursor()

            self.num_items = 0
            sql, args = self.get_sql(req, cached_ids, authname, tzinfo, locale)
            if sql.startswith('SELECT ') and \
                    not sql.startswith('SELECT DISTINCT '):
                # Wrap so rows are not duplicated by joins in the inner query.
                sql = 'SELECT DISTINCT * FROM (' + sql + ') AS subquery'
            if isinstance(self.env, ProductEnvironment):
                # Bind the product prefix as a query parameter instead of
                # interpolating it into the SQL text, so that prefixes
                # containing quote characters cannot break (or inject into)
                # the statement.
                sql = sql + """ WHERE product=%s"""
                args = list(args) + [self.env.product.prefix]
            self.num_items = self._count(sql, args)

            if self.num_items <= self.max:
                self.has_more_pages = False

            if self.has_more_pages:
                max = self.max
                if self.group:
                    max += 1
                sql = sql + " LIMIT %d OFFSET %d" % (max, self.offset)
                if self.page > int(ceil(float(self.num_items) / self.max)) and \
                        self.num_items != 0:
                    raise TracError(_("Page %(page)s is beyond the number of "
                                      "pages in the query", page=self.page))

            # self.env.log.debug("SQL: " + sql % tuple([repr(a) for a in args]))
            cursor.execute(sql, args)
            columns = get_column_names(cursor)
            fields = []
            for column in columns:
                fields += [f for f in self.fields if f['name'] == column] or \
                          [None]

            results = []
            product_idx = columns.index('product')
            column_indices = range(len(columns))
            for row in cursor:
                result = {}
                for i in column_indices:
                    name, field, val = columns[i], fields[i], row[i]
                    if name == 'reporter':
                        val = val or 'anonymous'
                    elif name == 'id':
                        val = int(val)
                        result['href'] = self._get_ticket_href(
                            row[product_idx], val)
                    elif name in self.time_fields:
                        val = from_utimestamp(val)
                    elif field and field['type'] == 'checkbox':
                        try:
                            val = bool(int(val))
                        except (TypeError, ValueError):
                            val = False
                    elif val is None:
                        val = ''
                    result[name] = val
                results.append(result)
            cursor.close()
            return results
import trac.ticket.query
# Monkey-patch trac so that any later reference to Query resolves to the
# cross-product-aware subclass defined above.
trac.ticket.query.Query = ProductQuery
trac.ticket.Query = ProductQuery
class ProductQueryModule(QueryModule):
    """Product-aware replacement for trac's QueryModule.

    `process_request` may be handed an explicit environment to run the
    query against; the module's own environment is restored afterwards.
    """

    def process_request(self, req, env=None):
        # Temporarily swap self.env and restore it in a finally block so
        # the module is not left pointing at the wrong environment when
        # the inherited implementation raises (trac redirects raise
        # RequestDone, which previously skipped the restore).
        saved_env = self.env
        if isinstance(self.env, ProductEnvironment) and env is not None:
            self.env = env
        try:
            return super(ProductQueryModule, self).process_request(req)
        finally:
            self.env = saved_env
# Monkey-patch trac so references to QueryModule resolve to the
# product-aware subclass defined above.
trac.ticket.query.QueryModule = ProductQueryModule
trac.ticket.QueryModule = ProductQueryModule
class ProductTicketQueryMacro(TicketQueryMacro):
    """TracQuery macro retrieving results across product boundaries.
    """
    @staticmethod
    def parse_args(content):
        """Parse macro arguments and translate them to a query string."""
        clauses = [{}]
        argv = []
        kwargs = {}
        # Each comma-separated piece is either kw=value, the literal 'or'
        # (which starts a new constraint clause), or a positional argument.
        for arg in TicketQueryMacro._comma_splitter.split(content):
            arg = arg.replace(r'\,', ',')
            m = re.match(r'\s*[^=]+=', arg)
            if m:
                kw = arg[:m.end() - 1].strip()
                value = arg[m.end():]
                if kw in ('order', 'max', 'format', 'col', 'product'):
                    kwargs[kw] = value
                else:
                    clauses[-1][kw] = value
            elif arg.strip() == 'or':
                clauses.append({})
            else:
                argv.append(arg)
        clauses = filter(None, clauses)
        if len(argv) > 0 and not 'format' in kwargs:  # 0.10 compatibility hack
            kwargs['format'] = argv[0]
        if 'order' not in kwargs:
            kwargs['order'] = 'id'
        if 'max' not in kwargs:
            kwargs['max'] = '0'  # unlimited by default
        format = kwargs.pop('format', 'list').strip().lower()
        if format in ('list', 'compact'):  # we need 'status' and 'summary'
            if 'col' in kwargs:
                kwargs['col'] = 'status|summary|' + kwargs['col']
            else:
                kwargs['col'] = 'status|summary'
        query_string = '&or&'.join('&'.join('%s=%s' % item
                                            for item in clause.iteritems())
                                   for clause in clauses)
        return query_string, kwargs, format
    def expand_macro(self, formatter, name, content):
        """Render the macro in the requested format (count, table,
        progress, compact or list)."""
        req = formatter.req
        query_string, kwargs, format = self.parse_args(content)
        if query_string:
            query_string += '&'
        query_string += '&'.join('%s=%s' % item
                                 for item in kwargs.iteritems())
        # Evaluate the query against the global environment so results
        # may span product boundaries.
        env = ProductEnvironment.lookup_global_env(self.env)
        query = ProductQuery.from_string(env, query_string)
        if format == 'count':
            cnt = query.count(req)
            return tag.span(cnt, title='%d tickets for which %s' %
                            (cnt, query_string), class_='query_count')
        tickets = query.execute(req)
        if format == 'table':
            data = query.template_data(formatter.context, tickets,
                                       req=formatter.context.req)
            add_stylesheet(req, 'common/css/report.css')
            return Chrome(env).render_template(
                req, 'query_results.html', data, None, fragment=True)
        if format == 'progress':
            from trac.ticket.roadmap import (RoadmapModule,
                                             apply_ticket_permissions,
                                             get_ticket_stats,
                                             grouped_stats_data)
            add_stylesheet(req, 'common/css/roadmap.css')
            def query_href(extra_args, group_value=None):
                # Href to the query restricted to one group's value.
                q = ProductQuery.from_string(env, query_string)
                if q.group:
                    extra_args[q.group] = group_value
                    q.group = None
                for constraint in q.constraints:
                    constraint.update(extra_args)
                if not q.constraints:
                    q.constraints.append(extra_args)
                return q.get_href(formatter.context)
            chrome = Chrome(env)
            tickets = apply_ticket_permissions(env, req, tickets)
            stats_provider = RoadmapModule(env).stats_provider
            by = query.group
            if not by:
                stat = get_ticket_stats(stats_provider, tickets)
                data = {
                    'stats': stat,
                    'stats_href': query_href(stat.qry_args),
                    'interval_hrefs': [query_href(interval['qry_args'])
                                       for interval in stat.intervals],
                    'legend': True,
                }
                return tag.div(
                    chrome.render_template(req, 'progress_bar.html', data,
                                           None, fragment=True),
                    class_='trac-progress')
            def per_group_stats_data(gstat, group_name):
                # Template data for one group's progress bar.
                return {
                    'stats': gstat,
                    'stats_href': query_href(gstat.qry_args, group_name),
                    'interval_hrefs': [query_href(interval['qry_args'],
                                                  group_name)
                                       for interval in gstat.intervals],
                    'percent': '%d / %d' % (gstat.done_count,
                                            gstat.count),
                    'legend': False,
                }
            groups = grouped_stats_data(env, stats_provider, tickets, by,
                                        per_group_stats_data)
            data = {
                'groups': groups, 'grouped_by': by,
                'summary': _("Ticket completion status for each %(group)s",
                             group=by),
            }
            return tag.div(
                chrome.render_template(req, 'progress_bar_grouped.html', data,
                                       None, fragment=True),
                class_='trac-groupprogress')
        # Formats above had their own permission checks, here we need to
        # do it explicitly:
        tickets = [t for t in tickets
                   if 'TICKET_VIEW' in req.perm('ticket', t['id'])]
        if not tickets:
            return tag.span(_("No results"), class_='query_no_results')
        # Cache resolved href targets
        hrefcache = {}
        def ticket_anchor(ticket):
            # Anchor to a ticket, resolved through its product's href.
            try:
                pvalue = ticket.get('product') or GLOBAL_PRODUCT
                envhref = hrefcache[pvalue]
            except KeyError:
                try:
                    env = lookup_product_env(self.env, prefix=pvalue,
                                             name=pvalue)
                except LookupError:
                    return tag.a('#%s' % ticket['id'],
                                 class_='missing product')
                hrefcache[pvalue] = envhref = \
                    resolve_product_href(to_env=env, at_env=self.env)
            return tag.a('#%s' % ticket['id'],
                         class_=ticket['status'],
                         href=envhref.ticket(int(ticket['id'])),
                         title=shorten_line(ticket['summary']))
        def ticket_groups():
            # (value, tickets, href, title) tuples per group value.
            groups = []
            for v, g in groupby(tickets, lambda t: t[query.group]):
                q = ProductQuery.from_string(env, query_string)
                # produce the hint for the group
                q.group = q.groupdesc = None
                order = q.order
                q.order = None
                title = _("%(groupvalue)s %(groupname)s tickets matching "
                          "%(query)s", groupvalue=v, groupname=query.group,
                          query=q.to_string())
                # produce the href for the query corresponding to the group
                for constraint in q.constraints:
                    constraint[str(query.group)] = v
                q.order = order
                href = q.get_href(formatter.context)
                groups.append((v, [t for t in g], href, title))
            return groups
        if format == 'compact':
            if query.group:
                groups = [(v, ' ',
                           tag.a('#%s' % u',\u200b'.join(str(t['id'])
                                                         for t in g),
                                 href=href, class_='query', title=title))
                          for v, g, href, title in ticket_groups()]
                return tag(groups[0], [(', ', g) for g in groups[1:]])
            else:
                alist = [ticket_anchor(ticket) for ticket in tickets]
                return tag.span(alist[0], *[(', ', a) for a in alist[1:]])
        else:
            if query.group:
                return tag.div(
                    [(tag.p(tag_('%(groupvalue)s %(groupname)s tickets:',
                                 groupvalue=tag.a(v, href=href, class_='query',
                                                  title=title),
                                 groupname=query.group)),
                      tag.dl([(tag.dt(ticket_anchor(t)),
                               tag.dd(t['summary'])) for t in g],
                             class_='wiki compact'))
                     for v, g, href, title in ticket_groups()])
            else:
                return tag.div(tag.dl([(tag.dt(ticket_anchor(ticket)),
                                        tag.dd(ticket['summary']))
                                       for ticket in tickets],
                                      class_='wiki compact'))
    def is_inline(self, content):
        """True for formats rendered inline (count, compact)."""
        query_string, kwargs, format = self.parse_args(content)
        return format in ('count', 'compact')
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/bloodhound_multiproduct/multiproduct/ticket/__init__.py | bloodhound_multiproduct/multiproduct/ticket/__init__.py | # -*- coding: UTF-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from multiproduct.ticket.web_ui import ProductTicketModule, ProductReportModule
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/bloodhound_multiproduct/multiproduct/ticket/batch.py | bloodhound_multiproduct/multiproduct/ticket/batch.py | # -*- coding: UTF-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from trac.ticket.batch import BatchModifyModule
from trac.web.chrome import add_script_data
from multiproduct.env import ProductEnvironment
from multiproduct.util.translation import _
class ProductBatchModifyModule(BatchModifyModule):
    """Product-aware batch modification of tickets.

    In a product environment the inherited behaviour is used unchanged;
    in the global environment the action controls are assembled per
    product, so each ticket is offered the workflow actions of the
    product it belongs to.
    """

    def add_template_data(self, req, data, tickets):
        if isinstance(self.env, ProductEnvironment):
            super(ProductBatchModifyModule, self).add_template_data(req, data,
                                                                    tickets)
            return

        data['batch_modify'] = True
        # 'query_href' may be absent from the session (e.g. on a fresh
        # visit); fall back to the default query URL instead of raising
        # KeyError.
        data['query_href'] = req.session.get('query_href') or req.href.query()
        tickets_by_product = {}
        for t in tickets:
            tickets_by_product.setdefault(t['product'], []).append(t)
        data['action_controls'] = []
        global_env = ProductEnvironment.lookup_global_env(self.env)
        cache = {}
        for k, v in tickets_by_product.iteritems():
            # Normalize the cache key so that a product of None and of ''
            # share the same (global) entry; previously the module looked
            # up `k or ''` but stored under `k`, so the None key never hit
            # the cache.
            key = k or ''
            batch_module = cache.get(key)
            if batch_module is None:
                env = ProductEnvironment(global_env, k) if k else global_env
                cache[key] = batch_module = ProductBatchModifyModule(env)
            data['action_controls'] += batch_module._get_action_controls(req,
                                                                         v)
        batch_list_modes = [
            {'name': _("add"), 'value': "+"},
            {'name': _("remove"), 'value': "-"},
            {'name': _("add / remove"), 'value': "+-"},
            {'name': _("set to"), 'value': "="},
        ]
        add_script_data(req, batch_list_modes=batch_list_modes,
                        batch_list_properties=self._get_list_fields())
import trac.ticket.batch
# Monkey-patch trac so references to BatchModifyModule resolve to the
# product-aware subclass defined above.
trac.ticket.batch.BatchModifyModule = ProductBatchModifyModule
trac.ticket.BatchModifyModule = ProductBatchModifyModule
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/bloodhound_relations/setup.py | bloodhound_relations/setup.py | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import sys
from setuptools import setup
# Long package description; its first line doubles as the short
# description passed to setup() below.
DESC = """Ticket relations plugin for Apache(TM) Bloodhound.
Add ticket relations functionality to Bloodhound sites.
"""
# Released version history; the last entry is the current version.
versions = [
    (0, 4, 0),
    (0, 5, 0),
    (0, 6, 0),
    (0, 7, 0),
    (0, 8, 0),
    (0, 9, 0),
]
latest = '.'.join(str(x) for x in versions[-1])
# Trove "Development Status" classifiers keyed by a friendly stage name.
status = {
    'planning': "Development Status :: 1 - Planning",
    'pre-alpha': "Development Status :: 2 - Pre-Alpha",
    'alpha': "Development Status :: 3 - Alpha",
    'beta': "Development Status :: 4 - Beta",
    'stable': "Development Status :: 5 - Production/Stable",
    'mature': "Development Status :: 6 - Mature",
    'inactive': "Development Status :: 7 - Inactive"
}
dev_status = status["alpha"]
cats = [
    dev_status,
    "Environment :: Plugins",
    "Environment :: Web Environment",
    "Framework :: Trac",
    "Intended Audience :: Developers",
    "Intended Audience :: Information Technology",
    "Intended Audience :: Other Audience",
    "Intended Audience :: System Administrators",
    "License :: Unknown",
    "Operating System :: OS Independent",
    "Programming Language :: Python",
    "Programming Language :: Python :: 2.5",
    "Programming Language :: Python :: 2.6",
    "Programming Language :: Python :: 2.7",
    "Topic :: Internet :: WWW/HTTP :: Dynamic Content :: CGI Tools/Libraries",
    "Topic :: Internet :: WWW/HTTP :: HTTP Servers",
    "Topic :: Internet :: WWW/HTTP :: WSGI",
    "Topic :: Software Development :: Bug Tracking",
    "Topic :: Software Development :: Libraries :: Application Frameworks",
    "Topic :: Software Development :: Libraries :: Python Modules",
    "Topic :: Software Development :: User Interfaces",
]
# Add the change log to the package description.
# NOTE(review): assumes a CHANGES file exists next to setup.py; there is
# no except clause, so a missing file aborts the build.
chglog = None
try:
    from os.path import dirname, join
    chglog = open(join(dirname(__file__), "CHANGES"))
    DESC += ('\n\n' + chglog.read())
finally:
    if chglog:
        chglog.close()
DIST_NM = 'BloodhoundRelationsPlugin'
# Mapping: package name -> (package directory, package data globs).
PKG_INFO = {'bhrelations': ('bhrelations',  # Package dir
                            # Package data
                            ['../CHANGES', '../TODO', '../COPYRIGHT',
                             '../NOTICE', '../README', '../TESTING_README',
                             'htdocs/*.*', 'htdocs/css/*.css',
                             'htdocs/img/*.*', 'htdocs/js/*.js',
                             'templates/*', 'default-pages/*',
                             'locale/*/LC_MESSAGES/*.mo'],
                            ),
            'bhrelations.widgets': (
                'bhrelations/widgets', ['templates/*.html']),
            'bhrelations.tests': (
                'bhrelations/tests', ['data/*.*']),
            'bhrelations.utils': (
                'bhrelations/utils', []),
            }
ENTRY_POINTS = {
    'trac.plugins': [
        'bhrelations.api = bhrelations.api',
        'bhrelations.search = bhrelations.search',
        'bhrelations.validation = bhrelations.validation',
        'bhrelations.web_ui = bhrelations.web_ui',
        'bhrelations.widgets.ticketrelations = bhrelations.widgets.relations',
    ],
}
extra = {}
# Optional localisation support: only wired in when trac's l10n distutils
# command classes can be imported.
try:
    from trac.util.dist import get_l10n_cmdclass
    cmdclass = get_l10n_cmdclass()
    if cmdclass:
        extra['cmdclass'] = cmdclass
        extractors = [
            ('**.py', 'trac.dist:extract_python', None),
            ('**/templates/**.html', 'genshi', None),
            ('**/templates/**.txt', 'genshi', {
                'template_class': 'genshi.template:TextTemplate'
            }),
        ]
        extra['message_extractors'] = {
            'bhrelations': extractors,
        }
except ImportError:
    pass
setup(
    name=DIST_NM,
    version=latest,
    description=DESC.split('\n', 1)[0],
    author="Apache Bloodhound",
    license="Apache License v2",
    url="https://bloodhound.apache.org/",
    requires=['trac'],
    package_dir=dict([p, i[0]] for p, i in PKG_INFO.iteritems()),
    packages=PKG_INFO.keys(),
    package_data=dict([p, i[1]] for p, i in PKG_INFO.iteritems()),
    include_package_data=True,
    provides=['%s (%s)' % (p, latest) for p in PKG_INFO.keys()],
    obsoletes=['%s (>=%s.0.0, <%s)' % (p, versions[-1][0], latest) \
               for p in PKG_INFO.keys()],
    entry_points=ENTRY_POINTS,
    classifiers=cats,
    long_description=DESC,
    test_suite='bhrelations.tests.test_suite',
    tests_require=['unittest2'] if sys.version_info < (2, 7) else [],
    **extra
)
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/bloodhound_relations/bhrelations/search.py | bloodhound_relations/bhrelations/search.py | # -*- coding: UTF-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from trac.core import Component, implements
from bhsearch.api import IDocIndexPreprocessor
from bhsearch.search_resources.ticket_search import TicketIndexer
from bhrelations.api import RelationsSystem, ResourceIdSerializer,\
IRelationChangingListener, TicketRelationsSpecifics
class RelationsDocPreprocessor(Component):
    """Adds a comma-separated ``relations`` field to documents before they
    are handed over to the Bloodhound Search indexer.
    """
    implements(IDocIndexPreprocessor)

    def pre_process(self, doc):
        """Collect the relations of *doc* and store them on the document."""
        full_id = ':'.join(
            [doc.get('product', ''), doc.get('type', ''), doc.get('id')])
        try:
            formatted = []
            for rel in RelationsSystem(self.env)._select_relations(full_id):
                formatted.extend(self._format_relations(rel))
            doc['relations'] = ','.join(formatted)
        except self.env.db_exc.OperationalError:
            # If bhrelations and bhsearch are installed at the same time and
            # bhsearch is upgraded before bhrelations, table
            # bloodhound_relations will be missing, thus causing the
            # OperationalError. As this means that the relations do not
            # exist yet, just skip indexing them.
            self.log.debug("Not indexing relations for %s", full_id)

    def _format_relations(self, relation):
        """Yield searchable textual representations of *relation*."""
        product, realm, res_id = ResourceIdSerializer.split_full_id(
            relation.destination)
        if realm == 'wiki':
            yield '%s:%s' % (relation.type, res_id)
        elif realm == 'ticket':
            yield '%s:#%s' % (relation.type, res_id)
            yield '%s:#%s-%s' % (relation.type, product, res_id)
class RelationSearchUpdater(Component):
    """Keeps the full-text search index in sync when relations change."""
    implements(IRelationChangingListener)

    def adding_relation(self, relation):
        """Re-index both endpoints of a freshly created relation."""
        self._reindex_endpoints(relation)

    def deleting_relation(self, relation, when):
        """Re-index both endpoints of a relation that is being removed."""
        self._reindex_endpoints(relation)

    def _reindex_endpoints(self, relation):
        """Re-index the source and destination tickets of *relation*."""
        specifics = TicketRelationsSpecifics(self.env)
        indexer = TicketIndexer(self.env)
        for endpoint_id in (relation.source, relation.destination):
            endpoint = ResourceIdSerializer.get_resource_by_id(endpoint_id)
            if endpoint.realm == 'ticket':
                indexer._index_ticket(
                    specifics._create_ticket_by_full_id(endpoint))
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/bloodhound_relations/bhrelations/api.py | bloodhound_relations/bhrelations/api.py | # -*- coding: UTF-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import itertools
import re
from datetime import datetime
from pkg_resources import resource_filename
from trac.config import OrderedExtensionsOption, Option
from trac.core import Component, ExtensionPoint, Interface, TracError, \
implements
from trac.db import DatabaseManager
from trac.env import IEnvironmentSetupParticipant
from trac.resource import Neighborhood, Resource, ResourceNotFound, \
ResourceSystem, get_resource_shortname
from trac.ticket.api import ITicketChangeListener, ITicketManipulator, \
TicketSystem
from trac.ticket.model import Ticket
from trac.util.datefmt import to_utimestamp, utc
from trac.web.chrome import ITemplateProvider
from multiproduct.api import ISupportMultiProductEnvironment
from multiproduct.model import Product
from multiproduct.env import ProductEnvironment
from bhrelations import db_default
from bhrelations.model import Relation
from bhrelations.utils import unique
from bhrelations.utils.translation import _, add_domain
# Human-readable plugin name used in log/error messages.
PLUGIN_NAME = 'Bloodhound Relations Plugin'
# trac.ini section holding the relation-type configuration.
RELATIONS_CONFIG_NAME = 'bhrelations_links'
# Default [bhrelations_links] contents written on first upgrade.
# Keys without a dot declare relation pairs ("end1,end2") or one-way
# relations; dotted keys configure a single end (.label, .blocks,
# .exclusive, .validators).
default_bhrelations_links = {
    'children.label': 'is a child of',
    'dependency': 'dependson,dependent',
    'dependency.validators': 'NoCycles,SingleProduct',
    'dependson.blocks': 'true',
    'dependson.label': 'depends on',
    'dependent.label': 'is a dependency of',
    'oneway': 'refersto',
    'parent_children': 'parent,children',
    'parent.exclusive': 'true',
    'parent.label': 'is a parent of',
    'parent_children.validators': 'OneToMany,SingleProduct,NoCycles',
    'refersto.label': 'refers to',
    'duplicate': 'duplicateof,duplicatedby',
    'duplicate.validators': 'ReferencesOlder',
    'duplicateof.label': 'is a duplicate of',
    'duplicatedby.label': 'is duplicated by',
}
#TODO: consider making the interface part of future
# I[*|Resource]ChangingListener approach based on output from the
# correlated discussion in Trac community
# (http://trac.edgewall.org/ticket/11148)
class IRelationChangingListener(Interface):
    """
    Extension point interface for components that require notification
    when relations are created or deleted while the database transaction
    is not yet committed. Use this interface when the listener's own
    database actions must be performed within the same transaction as the
    relation modification.
    Caution:
    Because the database transaction is not yet committed during the event
    notification, a long running listener activity may
    influence overall database performance or raise lock
    or transaction timeout errors. If a component has to perform
    non-transactional activity, use the IRelationChanged interface instead.
    If a listener raises an exception, all changes that were made within the
    transaction will not be applied.
    """
    def adding_relation(relation):
        """
        Called when a relation was added but the transaction was not yet
        committed.
        """
    def deleting_relation(relation, when):
        """
        Called when a relation is being deleted (at time *when*) but the
        transaction was not yet committed.
        """
class IRelationValidator(Interface):
    """
    Extension point interface for relation validators.
    """
    def validate(relation):
        """
        Validate the relation. If the relation is not valid, raise an
        appropriate exception (see bhrelations.validation).
        """
class EnvironmentSetup(Component):
    """Creates and upgrades the database schema and writes the default
    [bhrelations_links] configuration for the relations plugin.
    """
    implements(IEnvironmentSetupParticipant, ISupportMultiProductEnvironment,
               ITemplateProvider)

    # IEnvironmentSetupParticipant methods
    def environment_created(self):
        self.upgrade_environment(self.env.db_transaction)

    def environment_needs_upgrade(self, db):
        """Detects if the installed db version matches the running system"""
        db_installed_version = self._get_version(db)
        db_version = db_default.DB_VERSION
        if db_installed_version > db_version:
            raise TracError("""Current db version (%d) newer than supported
                by this version of the %s (%d)."""
                            % (db_installed_version, PLUGIN_NAME, db_version))
        # An upgrade is also needed when the default link configuration
        # has not been written to trac.ini yet.
        needs_upgrade = db_installed_version < db_version or \
                        not list(self.config.options(RELATIONS_CONFIG_NAME))
        return needs_upgrade

    def upgrade_environment(self, db):
        """Create the schema if missing and seed the default
        [bhrelations_links] configuration."""
        # Lazy %-style logging args instead of eager interpolation.
        self.log.debug("upgrading existing environment for %s plugin.",
                       PLUGIN_NAME)
        db_installed_version = self._get_version(db)
        if db_installed_version < 1:
            self._initialize_db(db)
            self._update_db_version(db, db_default.DB_VERSION)
        #add upgrade logic later if needed
        if not list(self.config.options(RELATIONS_CONFIG_NAME)):
            for option, value in default_bhrelations_links.iteritems():
                self.config.set(RELATIONS_CONFIG_NAME, option, value)
            self.config.save()
            print("Your environment has been upgraded with the default "
                  "[bhrelations_links] configuration.")

    def _get_version(self, db):
        """Finds the current version of the bloodhound database schema"""
        rows = db("""SELECT value FROM system WHERE name = %s
            """, (db_default.DB_SYSTEM_KEY,))
        return int(rows[0][0]) if rows else -1

    def _update_db_version(self, db, version):
        """Record *version* in the system table; returns *version*."""
        old_version = self._get_version(db)
        if old_version != -1:
            self.log.info("Updating %s database schema from version %d to %d",
                          PLUGIN_NAME, old_version, version)
            db("""UPDATE system SET value=%s
                  WHERE name=%s""", (version, db_default.DB_SYSTEM_KEY))
        else:
            self.log.info("Initial %s database schema set to version %d",
                          PLUGIN_NAME, version)
            # Fix: use a parameterized query (consistent with the UPDATE
            # branch above) instead of interpolating quoted values into
            # the SQL string.
            db("""INSERT INTO system (name, value) VALUES (%s, %s)
                """, (db_default.DB_SYSTEM_KEY, version))
        return version

    def _initialize_db(self, db):
        """Create all tables declared in db_default.SCHEMA."""
        # pylint: disable=protected-access
        self.log.debug("creating initial db schema for %s.", PLUGIN_NAME)
        db_connector, dummy = DatabaseManager(self.env)._get_connector()
        for table in db_default.SCHEMA:
            for statement in db_connector.to_sql(table):
                db(statement)

    # ITemplateProviderMethods
    def get_templates_dirs(self):
        """provide the plugin templates"""
        return [resource_filename(__name__, 'templates')]

    def get_htdocs_dirs(self):
        # NOTE(review): other ITemplateProvider implementations in this
        # plugin return [] here; returning None relies on the caller
        # tolerating it -- confirm before changing.
        return None
class RelationsSystem(Component):
    """Core API for creating, querying and deleting relations between
    resources, configured through the [bhrelations_links] section.
    """
    PARENT_RELATION_TYPE = 'parent'
    CHILDREN_RELATION_TYPE = 'children'
    # Components notified inside the relation-changing transaction.
    changing_listeners = ExtensionPoint(IRelationChangingListener)
    # All available validators; per-type validators are picked from these.
    all_validators = ExtensionPoint(IRelationValidator)
    global_validators = OrderedExtensionsOption(
        'bhrelations', 'global_validators',
        IRelationValidator,
        'NoSelfReferenceValidator, ExclusiveValidator, BlockerValidator',
        include_missing=False,
        doc="""Validators used to validate all relations,
        regardless of their type.""",
        doc_domain='bhrelations'
    )
    duplicate_relation_type = Option(
        'bhrelations',
        'duplicate_relation',
        'duplicateof',
        "Relation type to be used with the resolve as duplicate workflow.",
        doc_domain='bhrelations')
    def __init__(self):
        import pkg_resources
        locale_dir = pkg_resources.resource_filename(__name__, 'locale')
        add_domain(self.env.path, locale_dir)
        links, labels, validators, blockers, copy_fields, exclusive = \
            self._parse_config()
        self._links = links
        self._labels = labels
        self._validators = validators
        self._blockers = blockers
        self._copy_fields = copy_fields
        self._exclusive = exclusive
        # Map each relation end to its opposite end; one-way relation
        # ends map to None.
        self.link_ends_map = {}
        for end1, end2 in self.get_ends():
            self.link_ends_map[end1] = end2
            if end2 is not None:
                self.link_ends_map[end2] = end1
    def get_ends(self):
        """Return the configured (end1, end2) pairs; end2 is None for
        one-way relation types."""
        return self._links
    def add(self,
            source_resource_instance,
            destination_resource_instance,
            relation_type,
            comment=None,
            author=None,
            when=None):
        """Create, validate and store a relation between two resource
        instances; returns the new Relation.

        Raises UnknownRelationType for unconfigured types; validation
        errors propagate from add_relation().
        """
        source = ResourceIdSerializer.get_resource_id_from_instance(
            self.env, source_resource_instance)
        destination = ResourceIdSerializer.get_resource_id_from_instance(
            self.env, destination_resource_instance)
        if relation_type not in self.link_ends_map:
            raise UnknownRelationType(relation_type)
        if when is None:
            when = datetime.now(utc)
        relation = Relation(self.env)
        relation.source = source
        relation.destination = destination
        relation.type = relation_type
        relation.comment = comment
        relation.author = author
        relation.when = when
        self.add_relation(relation)
        return relation
    def get_reverted_relation(self, relation):
        """Return None if relation is one way"""
        other_end = self.link_ends_map[relation.type]
        if other_end:
            return relation.clone_reverted(other_end)
    def add_relation(self, relation):
        """Validate and insert *relation* (and its reverted twin, if any)
        in one transaction, notifying changing listeners inside it."""
        self.validate(relation)
        with self.env.db_transaction:
            relation.insert()
            reverted_relation = self.get_reverted_relation(relation)
            if reverted_relation:
                reverted_relation.insert()
            for listener in self.changing_listeners:
                listener.adding_relation(relation)
    def delete(self, relation_id, when=None):
        """Delete the relation identified by *relation_id* together with
        its reverted twin, then notify listeners and send e-mail."""
        if when is None:
            when = datetime.now(utc)
        relation = Relation.load_by_relation_id(self.env, relation_id)
        source = relation.source
        destination = relation.destination
        relation_type = relation.type
        with self.env.db_transaction:
            # Keep a detached copy for the listeners/notification;
            # presumably delete() below invalidates the loaded model.
            cloned_relation = relation.clone()
            relation.delete()
            other_end = self.link_ends_map[relation_type]
            if other_end:
                reverted_relation = Relation(self.env, keys=dict(
                    source=destination,
                    destination=source,
                    type=other_end,
                ))
                reverted_relation.delete()
            for listener in self.changing_listeners:
                listener.deleting_relation(cloned_relation, when)
        # Local import, presumably to avoid a circular dependency with
        # bhrelations.notification.
        from bhrelations.notification import RelationNotifyEmail
        RelationNotifyEmail(self.env).notify(cloned_relation, deleted=when)
    def delete_resource_relations(self, resource_instance):
        """Delete every relation in which the resource is an endpoint."""
        sql = "DELETE FROM " + Relation.get_table_name() + \
            " WHERE source=%s OR destination=%s"
        full_resource_id = ResourceIdSerializer.get_resource_id_from_instance(
            self.env, resource_instance)
        with self.env.db_transaction as db:
            db(sql, (full_resource_id, full_resource_id))
    def _debug_select(self):
        """The method is used for debug purposes"""
        sql = "SELECT id, source, destination, type FROM bloodhound_relations"
        with self.env.db_query as db:
            return [db(sql)]
    def get_relations(self, resource_instance):
        """Return a list of dicts describing all relations of the given
        resource instance."""
        relation_list = []
        for relation in self._select_relations_for_resource_instance(
                resource_instance):
            relation_list.append(dict(
                relation_id=relation.get_relation_id(),
                destination_id=relation.destination,
                destination=ResourceIdSerializer.get_resource_by_id(
                    relation.destination),
                type=relation.type,
                comment=relation.comment,
                when=relation.when,
                author=relation.author,
            ))
        return relation_list
    def _select_relations_for_resource_instance(self, resource):
        resource_full_id = ResourceIdSerializer.get_resource_id_from_instance(
            self.env, resource)
        return self._select_relations(resource_full_id)
    def _select_relations(
            self, source=None, resource_type=None, destination=None):
        """Select Relation rows filtered by any combination of source,
        relation type and destination."""
        #todo: add optional paging for possible umbrella tickets with
        #a lot of child tickets
        where = dict()
        if source:
            where["source"] = source
        if resource_type:
            where["type"] = resource_type
            order_by = ["destination"]
        else:
            order_by = ["type", "destination"]
        if destination:
            where["destination"] = destination
        return Relation.select(
            self.env,
            where=where,
            order_by=order_by
        )
    def _parse_config(self):
        """Parse [bhrelations_links] into the internal lookup tables;
        returns (links, labels, validators, blockers, copy_fields,
        exclusive)."""
        links = []
        labels = {}
        validators = {}
        blockers = {}
        copy_fields = {}
        exclusive = set()
        config = self.config[RELATIONS_CONFIG_NAME]
        # Options without a dot name relation pairs; dotted options
        # configure individual ends and are handled below.
        for name in [option for option, _ in config.options()
                     if '.' not in option]:
            reltypes = config.getlist(name)
            if not reltypes:
                continue
            if len(reltypes) == 1:
                reltypes += [None]
            links.append(tuple(reltypes))
            custom_validators = self._parse_validators(config, name)
            for rel in filter(None, reltypes):
                labels[rel] = \
                    config.get(rel + '.label') or rel.capitalize()
                blockers[rel] = \
                    config.getbool(rel + '.blocks', default=False)
                if config.getbool(rel + '.exclusive'):
                    exclusive.add(rel)
                validators[rel] = custom_validators
                # <end>.copy_fields may be absent or intentionally set empty.
                # config.getlist() will return [] in either case, so check that
                # the key is present before assigning the value
                cf_key = '%s.copy_fields' % rel
                if cf_key in config:
                    copy_fields[rel] = config.getlist(cf_key)
        return links, labels, validators, blockers, copy_fields, exclusive
    def _parse_validators(self, section, name):
        """Resolve '<name>.validators' entries to validator components
        (entry 'Foo' matches the component class FooValidator)."""
        custom_validators = set(
            '%sValidator' % validator for validator in
            set(section.getlist(name + '.validators', [], ',', True)))
        validators = []
        if custom_validators:
            for impl in self.all_validators:
                if impl.__class__.__name__ in custom_validators:
                    validators.append(impl)
        return validators
    def validate(self, relation):
        """
        Validate the relation using the configured validators. Validation is
        always run on the relation with master type.
        """
        backrel = self.get_reverted_relation(relation)
        if backrel and (backrel.type, relation.type) in self._links:
            relation = backrel
        for validator in self.global_validators:
            validator.validate(relation)
        for validator in self._validators.get(relation.type, ()):
            validator.validate(relation)
    def is_blocker(self, relation_type):
        """Whether relations of this type block resolution (".blocks")."""
        return self._blockers[relation_type]
    def render_relation_type(self, end):
        """Return the human-readable label of a relation end."""
        return self._labels[end]
    def get_relation_types(self):
        """Return the mapping of relation end name -> label."""
        return self._labels
    def find_blockers(self, resource_instance, is_blocker_method):
        """Collect the results of *is_blocker_method* for every blocking
        relation destination of the resource (non-None results only)."""
        # tbd: do we blocker finding to be recursive
        all_blockers = []
        for relation in self._select_relations_for_resource_instance(
                resource_instance):
            if self.is_blocker(relation.type):
                resource = ResourceIdSerializer.get_resource_by_id(
                    relation.destination)
                resource_instance = is_blocker_method(resource)
                if resource_instance is not None:
                    all_blockers.append(resource_instance)
                # blockers = self._recursive_find_blockers(
                #     relation, is_blocker_method)
                # if blockers:
                #     all_blockers.extend(blockers)
        return all_blockers
    def get_resource_name(self, resource_id):
        """Return the short display name of the resource behind the id."""
        resource = ResourceIdSerializer.get_resource_by_id(resource_id)
        return get_resource_shortname(self.env, resource)
class ResourceIdSerializer(object):
    """Converts between resource instances and fully qualified string ids
    of the form ``<product prefix>:<realm>:<resource id>`` (an empty
    prefix denotes the global environment).
    """
    RESOURCE_ID_DELIMITER = u":"

    @classmethod
    def get_resource_by_id(cls, resource_full_id):
        """
        * resource_full_id: fully qualified resource id in format
        "product:ticket:123". In case of global environment it is ":ticket:123"
        """
        nbh_prefix, realm, resource_id = cls.split_full_id(resource_full_id)
        if nbh_prefix:
            neighborhood = Neighborhood('product', nbh_prefix)
            return neighborhood.child(realm, id=resource_id)
        else:
            return Resource(realm, id=resource_id)

    @classmethod
    def split_full_id(cls, resource_full_id):
        """Split a full id into [product prefix, realm, resource id].

        Fix: split at most twice so that resource ids that themselves
        contain the delimiter (e.g. wiki page names with ':') are kept
        intact instead of breaking the 3-way unpacking in callers.
        """
        return resource_full_id.split(cls.RESOURCE_ID_DELIMITER, 2)

    @classmethod
    def get_resource_id_from_instance(cls, env, resource_instance):
        """
        * resource_instance: can be instance of a ticket, wiki page etc.

        Raises ValueError for tickets that have not been saved yet.
        """
        resource = resource_instance.resource
        # nbhprefix = ResourceSystem(env).neighborhood_prefix(
        #     resource.neighborhood)
        #TODO: temporary workaround for the ticket specific behavior
        #change it to generic resource behaviour
        ticket = resource_instance
        if ticket.id is None:
            raise ValueError("Cannot get resource id for ticket "
                             "that does not exist yet.")
        nbh_prefix = ticket["product"]
        resource_full_id = cls.RESOURCE_ID_DELIMITER.join(
            (nbh_prefix, resource.realm, unicode(resource.id))
        )
        return resource_full_id
class TicketRelationsSpecifics(Component):
    """Ticket-specific integration of the relations system: validates
    pending ticket changes against existing relations and maintains
    relations when tickets change or are deleted.
    """
    implements(ITicketManipulator, ITicketChangeListener)

    def __init__(self):
        self.rls = RelationsSystem(self.env)

    #ITicketChangeListener methods
    def ticket_created(self, ticket):
        pass

    def ticket_changed(self, ticket, comment, author, old_values):
        """Create a "duplicate of" relation when the ticket was closed as
        duplicate and a duplicate target was attached by the validator."""
        if self._closed_as_duplicate(ticket) and \
                self.rls.duplicate_relation_type and \
                hasattr(ticket, 'duplicate'):  # workaround for comment:5:ticket:710
            try:
                self.rls.add(ticket, ticket.duplicate,
                             self.rls.duplicate_relation_type,
                             comment, author)
            except TracError:
                # Best effort: a failing/duplicate relation must not block
                # the resolution itself.
                pass

    def _closed_as_duplicate(self, ticket):
        return (ticket['status'] == 'closed' and
                ticket['resolution'] == 'duplicate')

    def ticket_deleted(self, ticket):
        self.rls.delete_resource_relations(ticket)

    #ITicketManipulator methods
    def prepare_ticket(self, req, ticket, fields, actions):
        pass

    def validate_ticket(self, req, ticket):
        """Yield (field, message) pairs for every relation constraint the
        pending change violates."""
        return itertools.chain(
            self._check_blockers(req, ticket),
            self._check_open_children(req, ticket),
            self._check_duplicate_id(req, ticket),
        )

    def _check_blockers(self, req, ticket):
        """Refuse resolution while open blocking tickets exist."""
        action = req.args.get('action')
        operations = self._get_operations_for_action(req, ticket, action)
        if 'set_resolution' in operations:
            blockers = self.rls.find_blockers(ticket, self.is_blocker)
            if blockers:
                blockers_str = ', '.join(
                    get_resource_shortname(self.env, blocker_ticket.resource)
                    for blocker_ticket in unique(blockers))
                msg = ("Cannot resolve this ticket because it is "
                       "blocked by tickets [%s]"
                       % blockers_str)
                yield None, msg

    def _check_open_children(self, req, ticket):
        """Refuse resolution while the ticket has open children."""
        action = req.args.get('action')
        operations = self._get_operations_for_action(req, ticket, action)
        if 'set_resolution' in operations:
            for relation in [r for r in self.rls.get_relations(ticket)
                             if r['type'] == self.rls.PARENT_RELATION_TYPE]:
                child_ticket = \
                    self._create_ticket_by_full_id(relation['destination'])
                if child_ticket['status'] != 'closed':
                    msg = ("Cannot resolve this ticket because it has open "
                           "child tickets.")
                    yield None, msg

    def _check_duplicate_id(self, req, ticket):
        """Resolve and permission-check the duplicate target when closing
        as duplicate; attaches it as ticket.duplicate on success."""
        action = req.args.get('action')
        operations = self._get_operations_for_action(req, ticket, action)
        if 'set_resolution' in operations:
            resolution = req.args.get('action_%s_resolve_resolution' % action)
            if resolution == 'duplicate':
                duplicate_id = req.args.get('duplicate_id')
                if not duplicate_id:
                    yield None, "Duplicate ticket ID must be provided."
                    # Fix: without this return, find_ticket() was called
                    # with an empty/None id and crashed on re.match(None).
                    return
                try:
                    duplicate_ticket = self.find_ticket(duplicate_id)
                    # NOTE(review): Resource() is called with the ticket id
                    # as the realm argument -- confirm the intended realm.
                    req.perm.require('TICKET_MODIFY',
                                     Resource(duplicate_ticket.id))
                    ticket.duplicate = duplicate_ticket
                except NoSuchTicketError:
                    yield None, "Invalid duplicate ticket ID."

    def _get_operations_for_action(self, req, ticket, action):
        """Collect workflow operations all action controllers advertise
        for *action* on this ticket."""
        operations = []
        for controller in TicketSystem(self.env).action_controllers:
            actions = [a for w, a in
                       controller.get_ticket_actions(req, ticket) or []]
            if action in actions:
                operations += controller.actions[action]['operations']
        return operations

    def find_ticket(self, ticket_spec):
        """Resolve *ticket_spec* ("#123", "PREFIX-123" or
        "prefix:ticket:123") to a Ticket, searching other products as a
        fallback. Raises NoSuchTicketError when nothing matches."""
        ticket = None
        m = re.match(r'#?(?:(?P<pid>[^-]+)-)?(?P<tid>\d+)', ticket_spec)
        if m:
            pid = m.group('pid')
            tid = m.group('tid')
            if pid:
                try:
                    env = ProductEnvironment(self.env.parent, pid)
                    ticket = Ticket(env, tid)
                except Exception:
                    # Narrowed from a bare except: an unknown product or
                    # missing ticket simply means "not found here".
                    pass
            else:
                try:
                    ticket = Ticket(self.env, tid)
                except ResourceNotFound:
                    # ticket not found in current product, try all other products
                    for p in Product.select(self.env):
                        if p.prefix != self.env.product.prefix:
                            # TODO: check for PRODUCT_VIEW permissions
                            penv = ProductEnvironment(self.env.parent,
                                                      p.prefix)
                            try:
                                ticket = Ticket(penv, tid)
                            except ResourceNotFound:
                                pass
                            else:
                                break
        # ticket still not found, use fallback for <prefix>:ticket:<id> syntax
        if ticket is None:
            try:
                resource = ResourceIdSerializer.get_resource_by_id(ticket_spec)
                ticket = self._create_ticket_by_full_id(resource)
            except Exception:
                # Narrowed from a bare except: any parse/lookup failure
                # means the spec does not name an existing ticket.
                raise NoSuchTicketError
        return ticket

    def is_blocker(self, resource):
        """Return the ticket behind *resource* if it is still open,
        otherwise None."""
        ticket = self._create_ticket_by_full_id(resource)
        if ticket['status'] != 'closed':
            return ticket
        return None

    def _create_ticket_by_full_id(self, resource):
        """Instantiate the Ticket behind a ticket resource; TracError for
        any other realm."""
        env = self._get_env_for_resource(resource)
        if resource.realm == "ticket":
            return Ticket(env, resource.id)
        else:
            # Fix: '%' binds tighter than '+', so the original
            # ("..." + "...!" % realm) raised TypeError instead of the
            # intended TracError with the formatted message.
            raise TracError("Resource type %s is not supported by "
                            "Bloodhound Relations" % resource.realm)

    def _get_env_for_resource(self, resource):
        """Return the (product) environment owning *resource*."""
        if hasattr(resource, "neighborhood"):
            env = ResourceSystem(self.env). \
                load_component_manager(resource.neighborhood)
        else:
            env = self.env
        return env
class TicketChangeRecordUpdater(Component):
    """Writes ticket change-history entries whenever a relation involving
    a ticket is created or deleted."""
    implements(IRelationChangingListener)

    def adding_relation(self, relation):
        """Record a newly added relation in the ticket change log."""
        self.update_tickets_change_records(relation, False, relation.time)

    def deleting_relation(self, relation, when):
        """Record the removal of *relation* at time *when*."""
        self.update_tickets_change_records(relation, True,
                                           to_utimestamp(when))

    def update_tickets_change_records(self, relation, is_delete, when_ts):
        """Add a change record for the relation and, when it is two-way,
        for its reverted counterpart as well."""
        rls = RelationsSystem(self.env)
        with self.env.db_direct_transaction as db:
            self._add_ticket_change_record(db, relation, rls, is_delete,
                                           when_ts)
            back = rls.get_reverted_relation(relation)
            if back is not None:
                self._add_ticket_change_record(db, back, rls, is_delete,
                                               when_ts)

    def _get_ticket_id_and_product(self, resource_full_id):
        """Return (ticket id, product prefix); the ticket id is None when
        the resource is not a ticket."""
        prefix, realm, res_id = \
            ResourceIdSerializer.split_full_id(resource_full_id)
        if realm == "ticket":
            return int(res_id), prefix
        return None, prefix

    def _add_ticket_change_record(
            self, db, relation, relation_system, is_delete, when_ts):
        """Insert one ticket_change row for the relation's source ticket;
        no-op when the source is not a ticket."""
        ticket_id, product = self._get_ticket_id_and_product(relation.source)
        if ticket_id is None:
            return
        related_name = relation_system.get_resource_name(
            relation.destination)
        # Deletions move the name to oldvalue, additions to newvalue.
        old_value, new_value = (
            (related_name, None) if is_delete else (None, related_name))
        description = 'Relation "%s"' % (
            relation_system.render_relation_type(relation.type),)
        db("""INSERT INTO ticket_change
            (ticket, time, author, field, oldvalue, newvalue, product)
            VALUES (%s, %s, %s, %s, %s, %s, %s)""",
           (ticket_id,
            when_ts,
            relation.author,
            description,
            old_value,
            new_value,
            product))
class UnknownRelationType(ValueError):
    """Raised when a relation type is not configured in
    [bhrelations_links]."""
class NoSuchTicketError(ValueError):
    """Raised when a ticket specification cannot be resolved to an
    existing ticket."""
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/bloodhound_relations/bhrelations/web_ui.py | bloodhound_relations/bhrelations/web_ui.py | # -*- coding: UTF-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
r"""Ticket relations for Apache(TM) Bloodhound
Ticket relations user interface.
"""
import re
from trac.core import Component, TracError, implements
from trac.resource import Resource, get_resource_shortname, \
get_resource_summary, get_resource_url
from trac.ticket.model import Ticket
from trac.util import exception_to_unicode, to_unicode
from trac.web.api import IRequestFilter, IRequestHandler
from trac.web.chrome import ITemplateProvider, add_warning
from bhrelations.api import NoSuchTicketError, RelationsSystem, \
ResourceIdSerializer, TicketRelationsSpecifics, \
UnknownRelationType
from bhrelations.model import Relation
from bhrelations.utils.translation import _
from bhrelations.validation import ValidationError
class RelationManagementModule(Component):
    """Web UI for listing, adding and removing ticket relations at
    /ticket/<id>/relations."""
    implements(IRequestFilter, IRequestHandler, ITemplateProvider)
    # IRequestHandler methods
    def match_request(self, req):
        """Claim requests of the form /ticket/<numeric id>/relations."""
        match = re.match(r'/ticket/([0-9]+)/relations/*$', req.path_info)
        if not match:
            return False
        req.args['id'] = match.group(1)
        return True
    def process_request(self, req):
        """Render the relations page; on POST, add or remove relations as
        requested and report errors back to the template."""
        tid = req.args.get('id')
        if not tid:
            raise TracError(_('No ticket id provided.'))
        try:
            ticket = Ticket(self.env, tid)
        except ValueError:
            raise TracError(_('Invalid ticket id.'))
        # For access to the relation management, TICKET_MODIFY is required.
        req.perm.require('TICKET_MODIFY')
        relsys = RelationsSystem(self.env)
        data = {
            'relation': {},
        }
        if req.method == 'POST':
            # for modifying the relations TICKET_MODIFY is required for
            # both the source and the destination tickets
            if 'remove' in req.args:
                rellist = req.args.get('sel')
                if rellist:
                    # A single selected checkbox arrives as a bare string.
                    if isinstance(rellist, basestring):
                        rellist = [rellist, ]
                    self.remove_relations(req, rellist)
            elif 'add' in req.args:
                relation = dict(
                    destination=req.args.get('dest_tid', ''),
                    type=req.args.get('reltype', ''),
                    comment=req.args.get('comment', ''),
                )
                try:
                    trs = TicketRelationsSpecifics(self.env)
                    dest_ticket = trs.find_ticket(relation['destination'])
                except NoSuchTicketError:
                    data['error'] = _('Invalid ticket ID.')
                else:
                    req.perm.require('TICKET_MODIFY', Resource(dest_ticket.id))
                    try:
                        dbrel = relsys.add(ticket, dest_ticket,
                                           relation['type'],
                                           relation['comment'], req.authname)
                    except NoSuchTicketError:
                        data['error'] = _('Invalid ticket ID.')
                    except UnknownRelationType:
                        data['error'] = _('Unknown relation type.')
                    except ValidationError as ex:
                        data['error'] = ex.message
                    else:
                        # Notify
                        try:
                            self.notify_relation_changed(dbrel)
                        except Exception, e:
                            self.log.error("Failure sending notification on"
                                           "creation of relation: %s",
                                           exception_to_unicode(e))
                            add_warning(req, _("The relation has been added, "
                                               "but an error occurred while "
                                               "sending notifications: "
                                               "%(message)s",
                                               message=to_unicode(e)))
                if 'error' in data:
                    # Re-display the submitted values next to the error.
                    data['relation'] = relation
            else:
                raise TracError(_('Invalid operation.'))
        data.update({
            'ticket': ticket,
            'reltypes': sorted(relsys.get_relation_types().iteritems(),
                               key=lambda x: x[0]),
            'relations': self.get_ticket_relations(ticket),
            'get_resource_shortname': get_resource_shortname,
            'get_resource_summary': get_resource_summary,
        })
        return 'relations_manage.html', data, None
    def notify_relation_changed(self, relation):
        """Send e-mail notification about a newly created relation."""
        from bhrelations.notification import RelationNotifyEmail
        RelationNotifyEmail(self.env).notify(relation)
    # ITemplateProvider methods
    def get_htdocs_dirs(self):
        return []
    def get_templates_dirs(self):
        from pkg_resources import resource_filename
        return [resource_filename('bhrelations', 'templates')]
    # IRequestFilter methods
    def pre_process_request(self, req, handler):
        return handler
    def post_process_request(self, req, template, data, content_type):
        """Expose the "duplicate of" target (if any) to ticket pages as
        data['ticket_duplicate_of']."""
        if req.path_info.startswith('/ticket/'):
            ticket = data['ticket']
            rls = RelationsSystem(self.env)
            try:
                resid = ResourceIdSerializer \
                    .get_resource_id_from_instance(self.env, ticket)
            except ValueError:
                # Tickets that do not exist yet have no resource id.
                resid = None
            if rls.duplicate_relation_type and resid is not None:
                duplicate_relations = \
                    rls._select_relations(resid, rls.duplicate_relation_type)
                if duplicate_relations:
                    data['ticket_duplicate_of'] = \
                        duplicate_relations[0].destination
        return template, data, content_type
    # Public methods
    def get_ticket_relations(self, ticket):
        """Return the ticket's relations grouped by relation-type label,
        each entry enriched with 'desthref' and 'destticket'."""
        grouped_relations = {}
        relsys = RelationsSystem(self.env)
        reltypes = relsys.get_relation_types()
        trs = TicketRelationsSpecifics(self.env)
        for r in relsys.get_relations(ticket):
            r['desthref'] = get_resource_url(self.env, r['destination'],
                                             self.env.href)
            r['destticket'] = trs._create_ticket_by_full_id(r['destination'])
            grouped_relations.setdefault(reltypes[r['type']], []).append(r)
        return grouped_relations
    def remove_relations(self, req, rellist):
        """Delete the given relation ids, skipping (with a warning) those
        whose destination the user may not modify."""
        relsys = RelationsSystem(self.env)
        for relid in rellist:
            relation = Relation.load_by_relation_id(self.env, relid)
            resource = \
                ResourceIdSerializer.get_resource_by_id(relation.destination)
            if 'TICKET_MODIFY' in req.perm(resource):
                relsys.delete(relid)
            else:
                add_warning(req, _('Insufficient permissions to remove '
                                   'relation "%(relation)s"', relation=relid))
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/bloodhound_relations/bhrelations/db_default.py | bloodhound_relations/bhrelations/db_default.py | # -*- coding: UTF-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from bhrelations.model import Relation
# Key under which the plugin's schema version is stored in the
# 'system' table.
DB_SYSTEM_KEY = 'bhrelations'
# Current database schema version of this plugin.
DB_VERSION = 2
# pylint: disable=protected-access
# Table definitions created during install/upgrade.
SCHEMA = [mcls._get_schema() for mcls in (Relation, )]
# Schema migration steps between versions; none defined yet.
migrations = [
]
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/bloodhound_relations/bhrelations/model.py | bloodhound_relations/bhrelations/model.py | # -*- coding: UTF-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from trac.resource import Resource
from trac.util.datefmt import to_utimestamp, from_utimestamp
from bhdashboard.model import ModelBase
class Relation(ModelBase):
    """Model wrapping one row of the `bloodhound_relations` table.

    A relation is a directed edge (source, type, destination) plus
    optional comment/author/time metadata.
    """

    # Separator used when flattening a relation into a single id string.
    RELATION_ID_DELIMITER = u','

    _meta = {
        'table_name': 'bloodhound_relations',
        'object_name': 'Relation',
        'key_fields': ['source', 'type', 'destination'],
        'non_key_fields': [
            'comment',
            'author',
            {'name': 'time', 'type': 'int64'},
        ],
        'no_change_fields': ['source', 'destination', 'type'],
        'unique_fields': [],
    }

    @property
    def resource(self):
        """Expose this relation as a Trac resource."""
        return Resource('relation', self.prefix)

    @property
    def when(self):
        """Creation time as a datetime, or None when not recorded."""
        stamp = self._data.get('time')
        if stamp is None:
            return None
        return from_utimestamp(stamp)

    @when.setter
    def when(self, value):
        # Stored internally as a microsecond UTC timestamp.
        self._data["time"] = to_utimestamp(value)

    def clone_reverted(self, type):
        """Return a copy pointing the opposite way, with the given type."""
        flipped = self._data.copy()
        flipped['type'] = type
        flipped['source'] = self.destination
        flipped['destination'] = self.source
        reverted = Relation(self._env)
        # pylint: disable=protected-access
        reverted._data = flipped
        return reverted

    def clone(self):
        """Return an independent copy of this relation."""
        duplicate = Relation(self._env)
        # pylint: disable=protected-access
        duplicate._data = self._data.copy()
        return duplicate

    def get_relation_id(self):
        """Serialize (source, destination, type) into one id string."""
        parts = (self.source, self.destination, self.type)
        return self.RELATION_ID_DELIMITER.join(parts)

    @classmethod
    def _parse_relation_id(cls, relation_id):
        """Split a serialized relation id back into its three parts."""
        source, destination, relation_type = relation_id.split(
            cls.RELATION_ID_DELIMITER)
        return source, destination, relation_type

    @classmethod
    def load_by_relation_id(cls, env, relation_id):
        """Instantiate a Relation keyed by a serialized relation id."""
        source, destination, relation_type = cls._parse_relation_id(
            relation_id)
        return Relation(env, keys=dict(source=source,
                                       destination=destination,
                                       type=relation_type))

    def __str__(self):
        return '%s %s %s' % (self.source, self.type, self.destination)
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/bloodhound_relations/bhrelations/validation.py | bloodhound_relations/bhrelations/validation.py | # -*- coding: UTF-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from genshi.builder import tag
from trac.core import Component, implements, TracError
from trac.resource import get_resource_shortname
from trac.util.translation import tag_
from bhrelations.api import IRelationValidator, RelationsSystem, \
ResourceIdSerializer, TicketRelationsSpecifics
class ValidationError(TracError):
    """Raised by a relation validator when a proposed relation is invalid."""

    def __init__(self, message, title=None, show_traceback=False):
        TracError.__init__(self, message, title, show_traceback)
        # Resource ids that caused the failure (for UI highlighting).
        self.failed_ids = []
class Validator(Component):
    """Base class for relation validators.

    Provides breadth-first graph traversal helpers over the
    `bloodhound_relations` table that subclasses use to detect paths,
    ancestors and descendants between resources.
    """
    implements(IRelationValidator)

    def validate(self, relation):
        """Raise ValidationError if `relation` must not be added.

        Subclasses must override this.
        """
        raise NotImplementedError

    def render_relation_type(self, end):
        # Human-readable label for a relation type, as configured in
        # RelationsSystem.
        return RelationsSystem(self.env)._labels[end]

    def get_resource_name(self, resource_id):
        """Short display name for a serialized resource id."""
        resource = ResourceIdSerializer.get_resource_by_id(resource_id)
        return get_resource_shortname(self.env, resource)

    def _find_path(self, source, destination, relation_type):
        """Return the node-id path from source to destination following
        relations of `relation_type` (comma-separated list allowed), or
        None when no such path exists."""
        known_nodes, paths = self._bfs(source, destination, relation_type)
        return paths.get((source, destination), None)

    def _descendants(self, source, relation_type):
        """All nodes reachable from `source` (source itself excluded)."""
        known_nodes, paths = self._bfs(source, None, relation_type)
        return known_nodes - set([source])

    def _ancestors(self, source, relation_type):
        """All nodes from which `source` is reachable (source excluded)."""
        known_nodes, paths = self._bfs(source, None, relation_type,
                                       reverse=True)
        return known_nodes - set([source])

    def _bfs(self, source, destination, relation_type, reverse=False):
        """Breadth-first search over the relations graph.

        Expands frontier `new_nodes` level by level with one SQL query per
        level; stops early once `destination` is reached.  Returns
        (known_nodes, paths) where paths maps (source, node) -> node list.
        """
        known_nodes = set([source])
        new_nodes = set([source])
        paths = {(source, source): [source]}
        while new_nodes:
            # `reverse` flips edge direction: walk source->destination
            # instead of destination->source.
            if reverse:
                relation = 'source, destination'
                origin = 'source'
            else:
                relation = 'destination, source'
                origin = 'destination'
            relation_types = \
                ','.join("'%s'" % r for r in relation_type.split(','))
            # NOTE(review): values are interpolated directly into SQL;
            # presumably safe because ids come from the internal
            # ResourceIdSerializer, but confirm they are never
            # user-controlled.
            query = """
                SELECT %(relation)s
                FROM bloodhound_relations
                WHERE type IN (%(relation_type)s)
                AND %(origin)s IN (%(new_nodes)s)
                """ % dict(
                relation=relation,
                relation_type=relation_types,
                new_nodes=', '.join("'%s'" % n for n in new_nodes),
                origin=origin)
            new_nodes = set()
            for s, d in self.env.db_query(query):
                if d not in known_nodes:
                    new_nodes.add(d)
                    # Extend the recorded path to the newly found node.
                    paths[(source, d)] = paths[(source, s)] + [d]
            known_nodes = set.union(known_nodes, new_nodes)
            if destination in new_nodes:
                break
        return known_nodes, paths
class NoCyclesValidator(Validator):
    def validate(self, relation):
        """If a path exists from relation's destination to its source,
        adding the relation will create a cycle.

        :raises ValidationError: with `failed_ids` set to the cycle path.
        """
        path = self._find_path(relation.source,
                               relation.destination,
                               relation.type)
        if path:
            cycle_str = map(self.get_resource_name, path)
            # Fixed message: the original used adjacent string literals
            # ('Cycle in ''%s'': %s') which Python concatenates into
            # "Cycle in %s: %s", silently dropping the intended quotes
            # around the relation label.
            error = 'Cycle in "%s": %s' % (
                self.render_relation_type(relation.type),
                ' -> '.join(cycle_str))
            error = ValidationError(error)
            error.failed_ids = path
            raise error
class ExclusiveValidator(Validator):
    def validate(self, relation):
        """If a path of exclusive type exists between source and destination,
        adding a relation is not allowed.

        Additionally, when the new relation itself is of an exclusive type,
        reject it if it would connect any ancestor of the destination to any
        descendant of the source through an existing relation (or vice
        versa), since those relations would become invalid.
        """
        rls = RelationsSystem(self.env)
        source, destination = relation.source, relation.destination
        for exclusive_type in rls._exclusive:
            # A path in either direction makes any further relation between
            # the two endpoints invalid.
            path = (self._find_path(source, destination, exclusive_type)
                    or self._find_path(destination, source, exclusive_type))
            if path:
                raise ValidationError(
                    "Cannot add relation %s, source and destination "
                    "are connected with %s relation." % (
                        self.render_relation_type(relation.type),
                        self.render_relation_type(exclusive_type),
                    )
                )
        if relation.type in rls._exclusive:
            # NOTE(review): `exclusive_type` here is the value leaked from
            # the loop above (its last element), not `relation.type` — if
            # rls._exclusive ever holds more than one type this looks like
            # a bug; confirm intent.
            d_ancestors = self._ancestors(destination, exclusive_type)
            d_ancestors.add(destination)
            s_descendants = self._descendants(source, exclusive_type)
            s_descendants.add(source)
            # NOTE(review): placeholder names (s_ancestors/d_descendants)
            # are swapped relative to the variables bound to them; the
            # query is symmetric so the result is unaffected, but the
            # naming is misleading.
            query = """
                SELECT source, destination, type
                FROM bloodhound_relations
                WHERE (source in (%(s_ancestors)s)
                AND destination in (%(d_descendants)s))
                OR
                (source in (%(d_descendants)s)
                AND destination in (%(s_ancestors)s))
                """ % dict(
                s_ancestors=', '.join("'%s'" % n for n in d_ancestors),
                d_descendants=', '.join("'%s'" % n for n in s_descendants))
            conflicting_relations = list(self.env.db_query(query))
            if conflicting_relations:
                raise ValidationError(
                    "Connecting %s and %s with relation %s "
                    "would make the following relations invalid:\n"
                    "%s" % (
                        source,
                        destination,
                        self.render_relation_type(relation.type),
                        '\n'.join(map(str, conflicting_relations))
                    )
                )
class SingleProductValidator(Validator):
    """Require both endpoints of a relation to live in the same product."""

    def validate(self, relation):
        endpoint_products = [self.get_product(rid)
                             for rid in (relation.source,
                                         relation.destination)]
        if endpoint_products[0] != endpoint_products[1]:
            raise ValidationError(
                "Resources for %s relation must belong to the same product." %
                self.render_relation_type(relation.type)
            )

    def get_product(self, resource_id):
        """Product prefix component of a serialized resource id."""
        return ResourceIdSerializer.split_full_id(resource_id)[0]
class NoSelfReferenceValidator(Validator):
    """Reject relations whose source and destination are the same resource."""

    def validate(self, relation):
        if relation.source != relation.destination:
            return
        error = ValidationError(
            'Ticket cannot be self-referenced in a relation.')
        error.failed_ids = [relation.source]
        raise error
class OneToManyValidator(Validator):
    """Only tree relationships are allowed. A ticket cannot have multiple
    parents."""

    def validate(self, relation):
        rls = RelationsSystem(self.env)
        # Any existing relation of this type into the same destination
        # means the destination would gain a second "parent".
        conflicts = rls._select_relations(
            resource_type=relation.type,
            destination=relation.destination)
        if not conflicts:
            return
        raise ValidationError(
            tag_("Another resource is already related to %(destination)s "
                 "with %(relation)s relation.",
                 destination=tag.em(relation.destination),
                 relation=tag.b(self.render_relation_type(relation.type)))
        )
class ReferencesOlderValidator(Validator):
    def validate(self, relation):
        """Require the destination to be older than the source.

        Only enforced when both endpoints are tickets; other realms pass
        without a check.
        """
        source, destination = map(ResourceIdSerializer.get_resource_by_id,
                                  [relation.source, relation.destination])
        if source.realm == 'ticket' and destination.realm == 'ticket':
            # Re-load both as Ticket objects so creation times can be read.
            source, destination = map(
                TicketRelationsSpecifics(self.env)._create_ticket_by_full_id,
                [source, destination])
            if destination['time'] > source['time']:
                raise ValidationError(
                    "Relation %s must reference an older resource." %
                    self.render_relation_type(relation.type)
                )
class BlockerValidator(Validator):
    def validate(self, relation):
        """Prevents adding a cyclical blocker relation.

        If the relation is not itself a blocker, its reverted counterpart
        is checked instead; if neither direction blocks, nothing to do.
        The path search spans *all* blocker types at once, so mixed
        cycles (e.g. blocks + depends_on) are also caught.

        :raises ValidationError: with `failed_ids` set to the cycle path.
        """
        rls = RelationsSystem(self.env)
        if not rls.is_blocker(relation.type):
            relation = rls.get_reverted_relation(relation)
            if not relation or not rls.is_blocker(relation.type):
                return
        blockers = ','.join(b for b, is_blocker in rls._blockers.items()
                            if is_blocker)
        path = self._find_path(relation.source,
                               relation.destination,
                               blockers)
        if path:
            cycle_str = map(self.get_resource_name, path)
            # Fixed message: the original's adjacent string literals
            # ('Cycle in ''%s'': %s') concatenated into "Cycle in %s: %s",
            # silently dropping the intended quotes around the label.
            error = 'Cycle in "%s": %s' % (
                self.render_relation_type(relation.type),
                ' -> '.join(cycle_str))
            error = ValidationError(error)
            error.failed_ids = path
            raise error
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/bloodhound_relations/bhrelations/__init__.py | bloodhound_relations/bhrelations/__init__.py | # -*- coding: UTF-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/bloodhound_relations/bhrelations/notification.py | bloodhound_relations/bhrelations/notification.py | # -*- coding: UTF-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from trac.notification import NotifyEmail
from trac.ticket.notification import TicketNotifyEmail, \
get_ticket_notification_recipients
from trac.util.datefmt import from_utimestamp
from trac.web.chrome import Chrome
from bhrelations.api import ResourceIdSerializer, TicketRelationsSpecifics
class RelationNotifyEmail(TicketNotifyEmail):
    """Email notification for relation creation/deletion.

    When the relation's source is a ticket the notification mimics a
    regular ticket-change email; otherwise a generic relation template
    is used.
    """
    template_name = "relation_notify_email.txt"
    # Flipped to True when the ticket-style notification path is taken;
    # send() dispatches on it.
    imitate_ticket_notification = False

    def notify(self, relation, deleted=False):
        """Send a notification for `relation`.

        NOTE(review): when truthy, `deleted` is used directly as the
        modification time (`modtime = deleted`) — callers presumably pass
        the deletion timestamp rather than a bare boolean; confirm.
        """
        self.relation = relation
        source = ResourceIdSerializer.get_resource_by_id(relation.source)
        if source.realm == 'ticket':
            # Imitate a standard ticket notification for ticket sources.
            self.imitate_ticket_notification = True
            helper = TicketRelationsSpecifics(self.env)
            t = helper._create_ticket_by_full_id(source)
            self.template = Chrome(self.env).load_template(
                TicketNotifyEmail.template_name, method='text')
            if deleted:
                modtime = deleted
            else:
                modtime = from_utimestamp(relation.time)
            TicketNotifyEmail.notify(self, t, newticket=False, modtime=modtime)
        else:
            self._generic_notify(relation, deleted)

    def _generic_notify(self, relation, deleted):
        # Non-ticket sources: render the relation template directly.
        self.data.update(dict(
            created=not deleted,
            relation=relation,
        ))
        NotifyEmail.notify(self, '', '', '')

    def send(self, torcpts, ccrcpts):
        # Route through the matching base-class sender for whichever
        # notification style notify() selected.
        if self.imitate_ticket_notification:
            TicketNotifyEmail.send(self, torcpts, ccrcpts)
        else:
            NotifyEmail.send(self, torcpts, ccrcpts)

    def get_recipients(self, relid):
        """Union of ticket notification recipients of both endpoints."""
        relation = self.relation
        source, destination = map(ResourceIdSerializer.get_resource_by_id,
                                  (relation.source, relation.destination))
        to, cc = [], []
        for resource in (source, destination):
            if resource.realm == 'ticket':
                torecipients, ccrecipients, reporter, owner = \
                    get_ticket_notification_recipients(self.env, self.config,
                                                       resource.id, [])
                to.extend(torecipients)
                cc.extend(ccrecipients)
        return to, cc
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/bloodhound_relations/bhrelations/tests/search.py | bloodhound_relations/bhrelations/tests/search.py | # -*- coding: UTF-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import shutil
import tempfile
import unittest
from bhsearch.api import BloodhoundSearchApi
# TODO: Figure how to get trac to load components from these modules
import bhsearch.query_parser, bhsearch.search_resources.ticket_search, \
bhsearch.whoosh_backend
import bhrelations.search
from bhrelations.tests.base import BaseRelationsTestCase, DEPENDENCY_OF
class SearchIntegrationTestCase(BaseRelationsTestCase):
    """Verify relations are pushed to the Bloodhound Search index."""

    def setUp(self):
        BaseRelationsTestCase.setUp(self, enabled=['bhsearch.*'])
        # Whoosh index needs a real directory to write to.
        self.global_env.path = tempfile.mkdtemp('bhrelations-tempenv')
        self.search_api = BloodhoundSearchApi(self.env)
        self.search_api.upgrade_environment(self.env.db_transaction)

    def tearDown(self):
        shutil.rmtree(self.env.path)
        BaseRelationsTestCase.tearDown(self)

    def test_relations_are_indexed_on_creation(self):
        t1 = self._insert_and_load_ticket("Foo")
        t2 = self._insert_and_load_ticket("Bar")
        self.add_relation(t1, DEPENDENCY_OF, t2)
        result = self.search_api.query('%s:#2' % DEPENDENCY_OF)
        self.assertEqual(result.hits, 1)

    def test_relations_are_indexed_on_deletion(self):
        t1 = self._insert_and_load_ticket("Foo")
        t2 = self._insert_and_load_ticket("Bar")
        self.add_relation(t1, DEPENDENCY_OF, t2)
        relations = self.get_relations(t1)
        self.relations_system.delete(relations[0]["relation_id"])
        result = self.search_api.query('%s:#2' % DEPENDENCY_OF)
        self.assertEqual(result.hits, 0)

    def test_different_types_of_queries(self):
        # Both plain (#2) and product-qualified (#tp1-2) ticket references
        # should match the indexed relation.
        t1 = self._insert_and_load_ticket("Foo")
        t2 = self._insert_and_load_ticket("Bar")
        self.add_relation(t1, DEPENDENCY_OF, t2)
        self.assertEqual(self.search_api.query('%s:#2'
                                               % DEPENDENCY_OF).hits, 1)
        self.assertEqual(self.search_api.query('%s:#tp1-2'
                                               % DEPENDENCY_OF).hits, 1)
def suite():
    """Assemble this module's tests into a unittest suite."""
    tests = unittest.TestSuite()
    tests.addTest(unittest.makeSuite(SearchIntegrationTestCase))
    return tests
# Allow running this module's tests directly from the command line.
if __name__ == '__main__':
    unittest.main()
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/bloodhound_relations/bhrelations/tests/api.py | bloodhound_relations/bhrelations/tests/api.py | # -*- coding: UTF-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import unittest
from datetime import datetime
from trac.core import TracError
from trac.ticket.model import Ticket
from trac.util.datefmt import utc
from multiproduct.env import ProductEnvironment
from bhrelations.api import TicketRelationsSpecifics
from bhrelations.tests.mocks import TestRelationChangingListener
from bhrelations.validation import ValidationError
from bhrelations.tests.base import BaseRelationsTestCase, BLOCKED_BY, \
BLOCKS, CHILD, DEPENDENCY_OF, DEPENDS_ON, \
DUPLICATE_OF, MULTIPRODUCT_REL, PARENT, \
REFERS_TO
class ApiTestCase(BaseRelationsTestCase):
    """Exercise RelationsSystem behavior through the BaseRelationsTestCase
    helpers: adding/deleting one- and two-way relations, validator rules
    (cycles, single parent, exclusivity, duplicates, blockers), relation
    timestamps, cross-product relations and ticket lookup.
    """

    def test_can_add_two_ways_relations(self):
        #arrange
        ticket = self._insert_and_load_ticket("A1")
        ticket2 = self._insert_and_load_ticket("A2")
        #act
        self.add_relation(ticket, DEPENDENCY_OF, ticket2)
        #assert: the reverted relation is materialized on the other ticket
        relations = self.get_relations(ticket)
        self.assertEqual(DEPENDENCY_OF, relations[0]["type"])
        self.assertEqual(unicode(ticket2.id), relations[0]["destination"].id)
        relations = self.get_relations(ticket2)
        self.assertEqual(DEPENDS_ON, relations[0]["type"])
        self.assertEqual(unicode(ticket.id), relations[0]["destination"].id)

    def test_can_add_single_way_relations(self):
        #arrange
        ticket = self._insert_and_load_ticket("A1")
        ticket2 = self._insert_and_load_ticket("A2")
        #act
        self.add_relation(ticket, REFERS_TO, ticket2)
        #assert: no reverse record appears for a one-way type
        relations = self.get_relations(ticket)
        self.assertEqual(1, len(relations))
        self.assertEqual(REFERS_TO, relations[0]["type"])
        self.assertEqual(unicode(ticket2.id), relations[0]["destination"].id)
        self.assertEqual(0, len(self.get_relations(ticket2)))

    def test_can_add_multiple_relations(self):
        #arrange
        ticket = self._insert_and_load_ticket("A1")
        ticket2 = self._insert_and_load_ticket("A2")
        ticket3 = self._insert_and_load_ticket("A3")
        #act
        self.add_relation(ticket, DEPENDS_ON, ticket2)
        self.add_relation(ticket, DEPENDS_ON, ticket3)
        #assert
        self.assertEqual(2, len(self.get_relations(ticket)))
        self.assertEqual(1, len(self.get_relations(ticket2)))
        self.assertEqual(1, len(self.get_relations(ticket3)))

    def test_will_not_create_more_than_one_identical_relations(self):
        #arrange
        ticket = self._insert_and_load_ticket("A1")
        ticket2 = self._insert_and_load_ticket("A2")
        #act
        self.add_relation(ticket, DEPENDS_ON, ticket2)
        self.assertRaisesRegexp(
            TracError,
            "already exists",
            self.add_relation,
            ticket, DEPENDS_ON, ticket2
        )

    def test_will_not_create_more_than_one_identical_relations_db_level(self):
        # The table's primary key enforces uniqueness even when the API
        # layer is bypassed.
        sql = """INSERT INTO bloodhound_relations (source, destination, type)
                    VALUES (%s, %s, %s)"""
        with self.env.db_transaction as db:
            db(sql, ["1", "2", DEPENDS_ON])
            self.assertRaises(
                self.env.db_exc.IntegrityError,
                db,
                sql,
                ["1", "2", DEPENDS_ON]
            )

    def test_can_add_one_way_relations(self):
        #arrange
        ticket = self._insert_and_load_ticket("A1")
        ticket2 = self._insert_and_load_ticket("A2")
        #act
        self.add_relation(ticket, REFERS_TO, ticket2)
        #assert
        relations = self.get_relations(ticket)
        self.assertEqual(REFERS_TO, relations[0]["type"])
        self.assertEqual(unicode(ticket2.id),
                         relations[0]["destination"].id)
        self.assertEqual(0, len(self.get_relations(ticket2)))

    def test_can_delete_two_ways_relation(self):
        #arrange
        ticket = self._insert_and_load_ticket("A1")
        ticket2 = self._insert_and_load_ticket("A2")
        self.add_relation(ticket, DEPENDS_ON, ticket2)
        relations = self.get_relations(ticket)
        self.assertEqual(1, len(relations))
        self.assertEqual(1, len(self.get_relations(ticket2)))
        #act: deleting one side removes the reverted record too
        self.delete_relation(relations[0])
        #assert
        self.assertEqual(0, len(self.get_relations(ticket)))
        self.assertEqual(0, len(self.get_relations(ticket2)))

    def test_can_delete_single_way_relation(self):
        #arrange
        ticket = self._insert_and_load_ticket("A1")
        ticket2 = self._insert_and_load_ticket("A2")
        #act
        self.add_relation(ticket, REFERS_TO, ticket2)
        relations = self.get_relations(ticket)
        self.assertEqual(1, len(relations))
        self.assertEqual(0, len(self.get_relations(ticket2)))
        #act
        self.delete_relation(relations[0])
        #assert
        self.assertEqual(0, len(self.get_relations(ticket)))

    def test_can_not_add_cycled_immediate_relations(self):
        #arrange
        ticket1 = self._insert_and_load_ticket("A1")
        ticket2 = self._insert_and_load_ticket("A2")
        #act
        self.add_relation(ticket1, DEPENDS_ON, ticket2)
        try:
            self.add_relation(ticket2, DEPENDS_ON, ticket1)
            self.fail("Should throw an exception")
        except ValidationError as ex:
            # failed_ids carries the offending cycle path
            self.assertSequenceEqual(
                ["tp1:ticket:2", "tp1:ticket:1"], ex.failed_ids)

    def test_can_add_more_depends_ons(self):
        #arrange
        ticket1 = self._insert_and_load_ticket("A1")
        ticket2 = self._insert_and_load_ticket("A2")
        ticket3 = self._insert_and_load_ticket("A3")
        #act
        self.add_relation(ticket1, DEPENDS_ON, ticket2)
        self.add_relation(ticket1, DEPENDS_ON, ticket3)
        self.assertEqual(2, len(self.get_relations(ticket1)))

    def test_can_not_add_cycled_in_different_direction(self):
        #arrange
        ticket1 = self._insert_and_load_ticket("A1")
        ticket2 = self._insert_and_load_ticket("A2")
        #act
        self.add_relation(ticket1, DEPENDS_ON, ticket2)
        self.assertRaises(
            ValidationError,
            self.add_relation,
            ticket1, DEPENDENCY_OF, ticket2
        )

    def test_can_not_add_cycled_relations(self):
        #arrange
        ticket1 = self._insert_and_load_ticket("A1")
        ticket2 = self._insert_and_load_ticket("A2")
        ticket3 = self._insert_and_load_ticket("A3")
        #act: transitive cycle t1 -> t2 -> t3 -> t1 is rejected
        self.add_relation(ticket1, DEPENDS_ON, ticket2)
        self.add_relation(ticket2, DEPENDS_ON, ticket3)
        self.assertRaises(
            ValidationError,
            self.add_relation,
            ticket3, DEPENDS_ON, ticket1
        )

    def test_can_not_add_more_than_one_parent(self):
        #arrange
        child = self._insert_and_load_ticket("A1")
        parent1 = self._insert_and_load_ticket("A2")
        parent2 = self._insert_and_load_ticket("A3")
        #act
        self.add_relation(parent1, PARENT, child)
        self.assertRaises(
            ValidationError,
            self.add_relation,
            parent2, PARENT, child
        )
        self.assertRaises(
            ValidationError,
            self.add_relation,
            child, CHILD, parent2
        )

    def test_can_add_more_than_one_child(self):
        parent = self._insert_and_load_ticket("A1")
        child1 = self._insert_and_load_ticket("A2")
        child2 = self._insert_and_load_ticket("A3")
        self.add_relation(parent, PARENT, child1)
        self.add_relation(parent, PARENT, child2)
        self.assertEqual(2, len(self.get_relations(parent)))

    def test_ticket_can_be_resolved(self):
        #arrange
        parent = self._insert_and_load_ticket("A1")
        child = self._insert_and_load_ticket("A2")
        #act: resolving a child with no blockers produces no warnings
        self.add_relation(parent, PARENT, child)
        self.req.args['action'] = 'resolve'
        warnings = \
            TicketRelationsSpecifics(self.env).validate_ticket(self.req, child)
        self.assertEqual(0, len(list(warnings)))

    def test_can_save_and_load_relation_time(self):
        #arrange
        ticket1 = self._insert_and_load_ticket("A1")
        ticket2 = self._insert_and_load_ticket("A2")
        #act
        time = datetime.now(utc)
        self.add_relation(ticket1, DEPENDS_ON, ticket2, when=time)
        relations = self.get_relations(ticket1)
        #assert
        self.assertEqual(time, relations[0]["when"])

    def test_cannot_resolve_ticket_when_blocker_is_unresolved(self):
        #arrange
        ticket1 = self._insert_and_load_ticket("A1")
        ticket2 = self._insert_and_load_ticket("A2")
        self.add_relation(ticket1, DEPENDS_ON, ticket2)
        #act
        self.req.args["action"] = 'resolve'
        warnings = TicketRelationsSpecifics(self.env).validate_ticket(
            self.req, ticket1)
        #asset
        self.assertEqual(1, len(list(warnings)))

    def test_can_resolve_ticket_when_blocker_is_resolved(self):
        #arrange
        ticket1 = self._insert_and_load_ticket("A1")
        ticket2 = self._insert_and_load_ticket("A2", status="closed")
        self.add_relation(ticket1, DEPENDS_ON, ticket2)
        #act
        self.req.args["action"] = 'resolve'
        warnings = TicketRelationsSpecifics(self.env).validate_ticket(
            self.req, ticket1)
        #assert
        self.assertEqual(0, len(list(warnings)))

    def test_that_relations_are_deleted_when_ticket_is_deleted(self):
        #arrange
        ticket1 = self._insert_and_load_ticket("A1")
        ticket2 = self._insert_and_load_ticket("A2")
        self.add_relation(ticket1, DEPENDS_ON, ticket2)
        self.assertEqual(1, len(self.get_relations(ticket2)))
        #act
        ticket1.delete()
        #assert
        self.assertEqual(0, len(self.get_relations(ticket2)))

    def test_that_no_error_when_deleting_ticket_without_relations(self):
        #arrange
        ticket1 = self._insert_and_load_ticket("A1")
        #act
        ticket1.delete()

    def test_can_add_multi_product_relations(self):
        ticket1 = self._insert_and_load_ticket("A1")
        product2 = "tp2"
        self._load_product_from_data(self.global_env, product2)
        p2_env = ProductEnvironment(self.global_env, product2)
        ticket2 = self._insert_and_load_ticket_with_env(p2_env, "A2")
        self.add_relation(ticket1, MULTIPRODUCT_REL, ticket2)
        self.assertEqual(1, len(self.get_relations(ticket1)))
        self.assertEqual(1, len(self.get_relations(ticket2)))

    def _debug_select(self):
        """
        used for debug purposes
        """
        print " source, destination, type"
        sql = "SELECT source, destination, type FROM bloodhound_relations"
        with self.env.db_query as db:
            # for row in db(sql, ("source", "destination", "type")):
            for row in db(sql):
                print row

    def test_parent_relation_is_incompatible_with_two_way_relations(self):
        ticket1 = self._insert_and_load_ticket("A1")
        ticket2 = self._insert_and_load_ticket("A2")
        self.add_relation(ticket2, DEPENDS_ON, ticket1)
        self.assertRaises(
            ValidationError,
            self.add_relation,
            ticket1, PARENT, ticket2
        )
        self.assertRaises(
            ValidationError,
            self.add_relation,
            ticket1, CHILD, ticket2
        )

    def test_parent_relation_is_incompatible_with_one_way_relations(self):
        ticket1 = self._insert_and_load_ticket("A1")
        ticket2 = self._insert_and_load_ticket("A2")
        self.add_relation(ticket1, REFERS_TO, ticket2)
        self.assertRaises(
            ValidationError,
            self.add_relation,
            ticket1, PARENT, ticket2
        )
        self.assertRaises(
            ValidationError,
            self.add_relation,
            ticket1, CHILD, ticket2
        )

    def test_parent_must_be_in_same_product(self):
        ticket1 = self._insert_and_load_ticket("A1")
        product2 = "tp2"
        self._load_product_from_data(self.global_env, product2)
        p2_env = ProductEnvironment(self.global_env, product2)
        ticket2 = self._insert_and_load_ticket_with_env(p2_env, "A2")
        self.assertRaises(
            ValidationError,
            self.add_relation,
            ticket1, PARENT, ticket2
        )
        self.assertRaises(
            ValidationError,
            self.add_relation,
            ticket1, CHILD, ticket2
        )

    def test_cannot_create_other_relations_between_descendants(self):
        t1, t2, t3, t4, t5 = map(self._insert_and_load_ticket, "12345")
        self.add_relation(t1, PARENT, t2)  # t1 -> t2
        self.add_relation(t2, PARENT, t3)  #      / \
        self.add_relation(t2, PARENT, t4)  #    t3   t4
        self.assertRaises(
            ValidationError,
            self.add_relation,
            t2, DEPENDS_ON, t1
        )
        self.assertRaises(
            ValidationError,
            self.add_relation,
            t1, DEPENDS_ON, t2
        )
        self.assertRaises(
            ValidationError,
            self.add_relation,
            t4, DEPENDS_ON, t1
        )
        self.assertRaises(
            ValidationError,
            self.add_relation,
            t1, DEPENDS_ON, t3
        )
        # Relations to a node outside the tree, or between siblings,
        # remain valid.
        try:
            self.add_relation(t1, DEPENDS_ON, t5)
            self.add_relation(t3, DEPENDS_ON, t4)
        except ValidationError:
            self.fail("Could not add valid relation.")

    def test_cannot_add_parent_if_this_would_cause_invalid_relations(self):
        t1, t2, t3, t4, t5 = map(self._insert_and_load_ticket, "12345")
        self.add_relation(t1, PARENT, t2)  # t1 -> t2
        self.add_relation(t2, PARENT, t3)  #      / \
        self.add_relation(t2, PARENT, t4)  #    t3   t4    t5
        self.add_relation(t2, DEPENDS_ON, t5)
        self.assertRaises(
            ValidationError,
            self.add_relation,
            t2, PARENT, t5
        )
        self.assertRaises(
            ValidationError,
            self.add_relation,
            t3, PARENT, t5
        )
        self.assertRaises(
            ValidationError,
            self.add_relation,
            t5, PARENT, t1,
        )
        try:
            self.add_relation(t1, PARENT, t5)
        except ValidationError:
            self.fail("Could not add valid relation.")

    def test_cannot_close_ticket_with_open_children(self):
        t1 = self._insert_and_load_ticket("1")                   #     t1
        t2 = self._insert_and_load_ticket("2", status='closed')  #   / |  \
        t3 = self._insert_and_load_ticket("3")                   # t2  t3  t4
        t4 = self._insert_and_load_ticket("4")
        self.add_relation(t1, PARENT, t2)
        self.add_relation(t1, PARENT, t3)
        self.add_relation(t1, PARENT, t4)
        # A warning is be returned for each open ticket
        self.req.args["action"] = 'resolve'
        warnings = \
            TicketRelationsSpecifics(self.env).validate_ticket(self.req, t1)
        self.assertEqual(2, len(list(warnings)))

    def test_duplicate_can_only_reference_older_ticket(self):
        t1 = self._insert_and_load_ticket("1")
        t2 = self._insert_and_load_ticket("2")
        self.assertRaises(
            ValidationError,
            self.add_relation,
            t1, DUPLICATE_OF, t2
        )
        self.add_relation(t2, DUPLICATE_OF, t1)

    def test_detects_blocker_cycles(self):
        t1, t2, t3, t4, t5 = map(self._insert_and_load_ticket, "12345")
        self.add_relation(t1, BLOCKS, t2)
        self.add_relation(t3, DEPENDS_ON, t2)
        self.add_relation(t4, BLOCKED_BY, t3)
        self.add_relation(t4, DEPENDENCY_OF, t5)
        # Cycles are detected across *mixed* blocker types.
        self.assertRaises(
            ValidationError,
            self.add_relation,
            t2, BLOCKS, t1
        )
        self.assertRaises(
            ValidationError,
            self.add_relation,
            t3, DEPENDENCY_OF, t1
        )
        self.assertRaises(
            ValidationError,
            self.add_relation,
            t1, BLOCKED_BY, t2
        )
        self.assertRaises(
            ValidationError,
            self.add_relation,
            t1, DEPENDS_ON, t5
        )
        # Same-direction duplicates in other types and non-blocker
        # REFERS_TO relations are unaffected.
        self.add_relation(t1, DEPENDENCY_OF, t2)
        self.add_relation(t2, BLOCKS, t3)
        self.add_relation(t4, DEPENDS_ON, t3)
        self.add_relation(t5, BLOCKED_BY, t4)
        self.add_relation(t1, REFERS_TO, t2)
        self.add_relation(t2, REFERS_TO, t1)

    def test_can_find_ticket_by_id_from_same_env(self):
        """ Can find ticket given #id"""
        product2 = "tp2"
        self._load_product_from_data(self.global_env, product2)
        p2_env = ProductEnvironment(self.global_env, product2)
        t1 = self._insert_and_load_ticket_with_env(p2_env, "T1")
        trs = TicketRelationsSpecifics(p2_env)
        ticket = trs.find_ticket("#%d" % t1.id)
        self.assertEqual(ticket.id, 1)

    def test_can_find_ticket_by_id_from_different_env(self):
        """ Can find ticket from different env given #id"""
        product2 = "tp2"
        self._load_product_from_data(self.global_env, product2)
        p2_env = ProductEnvironment(self.global_env, product2)
        t1 = self._insert_and_load_ticket_with_env(p2_env, "T1")
        trs = TicketRelationsSpecifics(self.env)
        ticket = trs.find_ticket("#%d" % t1.id)
        self.assertEqual(ticket.id, 1)

    def test_can_find_ticket_by_product_and_id(self):
        """ Can find ticket given #prefix-id"""
        product2 = "tp2"
        self._load_product_from_data(self.global_env, product2)
        p2_env = ProductEnvironment(self.global_env, product2)
        t1 = self._insert_and_load_ticket_with_env(p2_env, "T1")
        trs = TicketRelationsSpecifics(self.env)
        ticket = trs.find_ticket("#%s-%d" % (product2, t1.id))
        self.assertEqual(ticket.id, 1)
class RelationChangingListenerTestCase(BaseRelationsTestCase):
    """Verify IRelationChangingListener events fire on add and delete."""

    def test_can_sent_adding_event(self):
        #arrange
        ticket1 = self._insert_and_load_ticket("A1")
        ticket2 = self._insert_and_load_ticket("A2")
        test_changing_listener = self.env[TestRelationChangingListener]
        #act
        self.add_relation(ticket1, DEPENDS_ON, ticket2)
        #assert: the mock listener captured the event name and relation
        self.assertEqual("adding_relation", test_changing_listener.action)
        relation = test_changing_listener.relation
        self.assertEqual(DEPENDS_ON, relation.type)

    def test_can_sent_deleting_event(self):
        #arrange
        ticket1 = self._insert_and_load_ticket("A1")
        ticket2 = self._insert_and_load_ticket("A2")
        test_changing_listener = self.env[TestRelationChangingListener]
        self.add_relation(ticket1, DEPENDS_ON, ticket2)
        #act
        relations = self.get_relations(ticket1)
        self.delete_relation(relations[0])
        #assert
        self.assertEqual("deleting_relation", test_changing_listener.action)
        relation = test_changing_listener.relation
        self.assertEqual(DEPENDS_ON, relation.type)
class TicketChangeRecordUpdaterTestCase(BaseRelationsTestCase):
def test_can_update_ticket_history_on_relation_add_on(self):
#arrange
ticket1 = self._insert_and_load_ticket("A1")
ticket2 = self._insert_and_load_ticket("A2")
#act
self.add_relation(ticket1, DEPENDS_ON, ticket2)
#assert
change_log1 = Ticket(self.env, ticket1.id).get_changelog()
self.assertEquals(1, len(change_log1))
change_log2 = Ticket(self.env, ticket2.id).get_changelog()
self.assertEquals(1, len(change_log2))
def test_can_update_ticket_history_on_relation_deletion(self):
#arrange
ticket1 = self._insert_and_load_ticket("A1")
ticket2 = self._insert_and_load_ticket("A2")
self.add_relation(ticket1, DEPENDS_ON, ticket2)
relations = self.get_relations(ticket1)
#act
self.delete_relation(relations[0])
#assert
change_log1 = Ticket(self.env, ticket1.id).get_changelog()
self.assertEquals(2, len(change_log1))
change_log2 = Ticket(self.env, ticket2.id).get_changelog()
self.assertEquals(2, len(change_log2))
def _debug_select(self, ticket_id=None):
"""
used for debug purposes
"""
# print " source, destination, type"
sql = "SELECT * FROM ticket_change"
print "db_direct_transaction result:"
with self.env.db_direct_transaction as db:
# for row in db(sql, ("source", "destination", "type")):
for row in db(sql):
print row
sql = "SELECT * FROM ticket_change"
print "db_transaction result:"
with self.env.db_transaction as db:
for row in db(sql):
print row
if ticket_id:
sql = """SELECT time, author, field, oldvalue, newvalue
FROM ticket_change WHERE ticket=%s"""
print "db_transaction select by ticket_id result:"
with self.env.db_transaction:
for row in self.env.db_query(sql, (ticket_id, )):
print row
def suite():
    """Aggregate the relations API test cases into one suite."""
    tests = unittest.TestSuite()
    for case in (ApiTestCase,
                 RelationChangingListenerTestCase,
                 TicketChangeRecordUpdaterTestCase):
        tests.addTest(unittest.makeSuite(case))
    return tests


if __name__ == '__main__':
    unittest.main()
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/bloodhound_relations/bhrelations/tests/web_ui.py | bloodhound_relations/bhrelations/tests/web_ui.py | # -*- coding: UTF-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import unittest
from trac.ticket import Ticket
from trac.util.datefmt import to_utimestamp
from trac.web import RequestDone
from bhrelations.api import ResourceIdSerializer
from bhrelations.tests.base import DEPENDS_ON, DUPLICATE_OF, \
BaseRelationsTestCase
from bhrelations.web_ui import RelationManagementModule
from multiproduct.ticket.web_ui import TicketModule
class RelationManagementModuleTestCase(BaseRelationsTestCase):
    """Tests for the RelationManagementModule request handler.

    Each test issues a (mostly POST) request against the relation
    management endpoint for one freshly inserted ticket and inspects the
    template data the handler returns.
    """

    def setUp(self):
        BaseRelationsTestCase.setUp(self)
        # Every test operates on this pre-existing ticket via POST.
        ticket_id = self._insert_ticket(self.env, "Foo")
        self.req.method = 'POST'
        self.req.args['id'] = ticket_id

    def test_can_process_empty_request(self):
        self.req.method = 'GET'
        data = self.process_request()
        self.assertSequenceEqual(data['relations'], [])
        # 11 relation type names are configured by the base fixture
        # (2 dependency + 2 parent/child + 2 multiproduct + 1 oneway +
        #  2 duplicate + 2 blocker).
        self.assertEqual(len(data['reltypes']), 11)

    def test_handles_missing_ticket_id(self):
        # 'add' submitted without a destination ticket id.
        self.req.args['add'] = 'add'
        data = self.process_request()
        self.assertIn("Invalid ticket", data["error"])

    def test_handles_invalid_ticket_id(self):
        self.req.args['add'] = True
        self.req.args['dest_tid'] = 'no such ticket'
        data = self.process_request()
        self.assertIn("Invalid ticket", data["error"])

    def test_handles_missing_relation_type(self):
        t2 = self._insert_ticket(self.env, "Bar")
        self.req.args['add'] = True
        self.req.args['dest_tid'] = str(t2)
        data = self.process_request()
        self.assertIn("Unknown relation type", data["error"])

    def test_handles_invalid_relation_type(self):
        t2 = self._insert_ticket(self.env, "Bar")
        self.req.args['add'] = True
        self.req.args['dest_tid'] = str(t2)
        self.req.args['reltype'] = 'no such relation'
        data = self.process_request()
        self.assertIn("Unknown relation type", data["error"])

    def test_shows_relation_that_was_just_added(self):
        t2 = self._insert_ticket(self.env, "Bar")
        self.req.args['add'] = True
        self.req.args['dest_tid'] = str(t2)
        self.req.args['reltype'] = DEPENDS_ON
        data = self.process_request()
        self.assertEqual(len(data["relations"]), 1)

    def test_failure_to_notify_does_not_result_in_error(self):
        t2 = self._insert_ticket(self.env, "Bar")
        self.req.args['add'] = True
        self.req.args['dest_tid'] = str(t2)
        self.req.args['reltype'] = DEPENDS_ON
        rlm = RelationManagementModule(self.env)
        # Swap in a notifier that always raises; the module should only
        # record a chrome warning, not propagate the exception.
        rlm.notify_relation_changed = self._failing_notification
        rlm.process_request(self.req)
        self.assertEqual(len(self.req.chrome['warnings']), 1)

    def _failing_notification(self, relation):
        # Stand-in notifier used to simulate a notification failure.
        raise Exception()

    def process_request(self):
        # Helper: run the handler and return only the template data.
        url, data, x = RelationManagementModule(self.env).process_request(
            self.req)
        return data
class ResolveTicketIntegrationTestCase(BaseRelationsTestCase):
    """Integration tests for resolving a ticket as 'duplicate' through
    the ticket workflow, which creates/reads DUPLICATE_OF relations via
    RelationManagementModule's post-processing hook.
    """

    def setUp(self):
        BaseRelationsTestCase.setUp(self)
        self.mock_request()
        self.configure()
        # Capture redirects instead of letting them escape the test.
        self.req.redirect = self.redirect
        self.redirect_url = None
        self.redirect_permanent = None

    def test_creates_duplicate_relation_from_duplicate_id(self):
        t1 = self._insert_and_load_ticket("Foo")
        t2 = self._insert_and_load_ticket("Bar")
        # A successful resolve redirects; our stub signals that with
        # RequestDone (see redirect() below).
        self.assertRaises(RequestDone,
                          self.resolve_as_duplicate,
                          t2, self.get_id(t1))
        relations = self.get_relations(t2)
        self.assertEqual(len(relations), 1)
        relation = relations[0]
        self.assertEqual(relation['destination_id'], self.get_id(t1))
        self.assertEqual(relation['type'], DUPLICATE_OF)

    def test_prefills_duplicate_id_if_relation_exists(self):
        t1 = self._insert_and_load_ticket("Foo")
        t2 = self._insert_and_load_ticket("Bar")
        self.add_relation(t2, DUPLICATE_OF, t1)
        self.req.path_info = '/ticket/%d' % t2.id
        data = self.process_request()
        # The existing DUPLICATE_OF target is exposed to the template.
        self.assertIn('ticket_duplicate_of', data)
        t1id = ResourceIdSerializer.get_resource_id_from_instance(self.env, t1)
        self.assertEqual(data['ticket_duplicate_of'], t1id)

    def test_can_set_duplicate_resolution_even_if_relation_exists(self):
        t1 = self._insert_and_load_ticket("Foo")
        t2 = self._insert_and_load_ticket("Bar")
        self.add_relation(t2, DUPLICATE_OF, t1)
        self.assertRaises(RequestDone,
                          self.resolve_as_duplicate,
                          t2, self.get_id(t1))
        # Reload to observe the workflow's status/resolution change.
        t2 = Ticket(self.env, t2.id)
        self.assertEqual(t2['status'], 'closed')
        self.assertEqual(t2['resolution'], 'duplicate')

    def test_post_process_request_does_not_break_ticket(self):
        t1 = self._insert_and_load_ticket("Foo")
        self.req.path_info = '/ticket/%d' % t1.id
        self.process_request()

    def test_post_process_request_does_not_break_newticket(self):
        self.req.path_info = '/newticket'
        self.process_request()

    def test_post_process_request_can_handle_none_data(self):
        self.req.path_info = '/source'
        RelationManagementModule(self.env).post_process_request(self.req, '',
                                                                None, '')

    def resolve_as_duplicate(self, ticket, duplicate_id):
        # Simulate the 'resolve as duplicate' workflow POST.
        self.req.method = 'POST'
        self.req.path_info = '/ticket/%d' % ticket.id
        self.req.args['id'] = ticket.id
        self.req.args['action'] = 'resolve'
        self.req.args['action_resolve_resolve_resolution'] = 'duplicate'
        self.req.args['duplicate_id'] = duplicate_id
        self.req.args['view_time'] = str(to_utimestamp(ticket['changetime']))
        self.req.args['submit'] = True
        return self.process_request()

    def process_request(self):
        # Run TicketModule, then let RelationManagementModule post-process
        # the rendered data, as the real request pipeline would.
        ticket_module = TicketModule(self.env)
        ticket_module.match_request(self.req)
        template, data, content_type = ticket_module.process_request(self.req)
        template, data, content_type = \
            RelationManagementModule(self.env).post_process_request(
                self.req, template, data, content_type)
        return data

    def mock_request(self):
        # Minimal request attributes TicketModule expects to find.
        self.req.method = 'GET'
        self.req.get_header = lambda x: None
        self.req.authname = 'x'
        self.req.session = {}
        self.req.chrome = {'warnings': []}
        self.req.form_token = ''

    def configure(self):
        # Enable a resolve action and register a 'duplicate' resolution.
        config = self.env.config
        config['ticket-workflow'].set('resolve', 'new -> closed')
        config['ticket-workflow'].set('resolve.operations', 'set_resolution')
        config['ticket-workflow'].set('resolve.permissions', 'TICKET_MODIFY')
        with self.env.db_transaction as db:
            db("INSERT INTO enum VALUES "
               "('resolution', 'duplicate', 'duplicate')")

    def redirect(self, url, permanent=False):
        # Stub for Request.redirect: record the target, then abort the
        # request cycle like the real implementation does.
        self.redirect_url = url
        self.redirect_permanent = permanent
        raise RequestDone

    def get_id(self, ticket):
        return ResourceIdSerializer.get_resource_id_from_instance(self.env,
                                                                  ticket)
def suite():
    """Build the test suite for the relation management web UI.

    NOTE(review): ResolveTicketIntegrationTestCase is defined above but
    not included here -- confirm whether that omission is intentional.
    """
    tests = unittest.TestSuite()
    tests.addTest(unittest.makeSuite(RelationManagementModuleTestCase))
    return tests


if __name__ == '__main__':
    unittest.main()
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/bloodhound_relations/bhrelations/tests/validation.py | bloodhound_relations/bhrelations/tests/validation.py | # -*- coding: UTF-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import unittest
from bhrelations.tests.base import BaseRelationsTestCase
from bhrelations.validation import Validator
class GraphFunctionsTestCase(BaseRelationsTestCase):
    """Exercises Validator's private graph-traversal helpers against a
    small fixed DAG.

    Edge list below describes this graph (all edges of type 'p'):

        A   H
       / \ /
      B   C
         / \
        D   E - F - G
    """
    edges = [
        ('A', 'B', 'p'),
        ('A', 'C', 'p'),
        ('C', 'D', 'p'),
        ('C', 'E', 'p'),
        ('E', 'F', 'p'),
        ('F', 'G', 'p'),
        ('H', 'C', 'p'),
    ]

    def setUp(self):
        BaseRelationsTestCase.setUp(self)
        # bhrelations point from destination to source
        for destination, source, rel_type in self.edges:
            self.env.db_direct_transaction(
                """INSERT INTO bloodhound_relations (source, destination, type)
                VALUES ('%s', '%s', '%s')""" %
                (source, destination, rel_type)
            )
        self.validator = Validator(self.env)

    def test_find_path(self):
        find_path = self.validator._find_path
        self.assertEqual(find_path(u'A', u'E', u'p'), [u'A', u'C', u'E'])
        self.assertEqual(find_path(u'A', u'G', u'p'),
                         [u'A', u'C', u'E', u'F', u'G'])
        self.assertEqual(find_path(u'H', u'D', u'p'), [u'H', u'C', u'D'])
        self.assertIsNone(find_path(u'E', u'A', u'p'))
        self.assertIsNone(find_path(u'B', u'D', u'p'))

    def test_descendants(self):
        descendants = self.validator._descendants
        self.assertEqual(descendants(u'B', u'p'), set())
        self.assertEqual(descendants(u'E', u'p'), set([u'F', u'G']))
        self.assertEqual(descendants(u'H', u'p'),
                         set([u'C', u'D', u'E', u'F', u'G']))

    def test_ancestors(self):
        ancestors = self.validator._ancestors
        self.assertEqual(ancestors(u'B', u'p'), set([u'A']))
        self.assertEqual(ancestors(u'E', u'p'), set([u'A', u'C', u'H']))
        self.assertEqual(ancestors(u'H', u'p'), set())
def suite():
test_suite = unittest.TestSuite()
test_suite.addTest(unittest.makeSuite(GraphFunctionsTestCase))
return test_suite
if __name__ == '__main__':
unittest.main()
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/bloodhound_relations/bhrelations/tests/__init__.py | bloodhound_relations/bhrelations/tests/__init__.py | # -*- coding: UTF-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import unittest
from bhrelations.tests import api, notification, search, validation, web_ui
def suite():
    """Combine the suites of every bhrelations test module."""
    tests = unittest.TestSuite()
    for module in (api, notification, search, validation, web_ui):
        tests.addTest(module.suite())
    return tests


if __name__ == '__main__':
    unittest.main(defaultTest='suite')
else:
    # Expose the combined suite at module level for importing harnesses.
    test_suite = suite()
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/bloodhound_relations/bhrelations/tests/notification.py | bloodhound_relations/bhrelations/tests/notification.py | # -*- coding: UTF-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import unittest
from trac.tests.notification import SMTPServerStore, SMTPThreadedServer
from trac.ticket.tests.notification import SMTP_TEST_PORT, smtp_address
from bhrelations.tests.base import DEPENDENCY_OF, BaseRelationsTestCase
from bhrelations.notification import RelationNotifyEmail
class NotificationTestCase(BaseRelationsTestCase):
    """End-to-end checks of relation-change emails against a local
    capturing SMTP server.

    Fix: replaced the deprecated ``failIf(x in y)`` / ``failIf(x)``
    aliases with ``assertNotIn`` / ``assertIn`` / ``assertFalse``, which
    also produce far better failure messages.
    """

    @classmethod
    def setUpClass(cls):
        # One SMTP server shared by all tests in the class.
        cls.smtpd = CustomSMTPThreadedServer(SMTP_TEST_PORT)
        cls.smtpd.start()

    @classmethod
    def tearDownClass(cls):
        cls.smtpd.stop()

    def setUp(self):
        super(NotificationTestCase, self).setUp()
        # Route notifications to the capturing server and notify
        # owners/reporters/cc unconditionally.
        self.env.config.set('notification', 'smtp_enabled', 'true')
        self.env.config.set('notification', 'always_notify_owner', 'true')
        self.env.config.set('notification', 'always_notify_reporter', 'true')
        self.env.config.set('notification', 'smtp_always_cc',
                            'joe.user@example.net, joe.bar@example.net')
        self.env.config.set('notification', 'use_public_cc', 'true')
        self.env.config.set('notification', 'smtp_port', str(SMTP_TEST_PORT))
        self.env.config.set('notification', 'smtp_server', 'localhost')
        self.notifier = RelationNotifyEmail(self.env)

    def tearDown(self):
        super(NotificationTestCase, self).tearDown()
        # Drop captured messages so tests stay independent.
        self.smtpd.cleanup()

    def test_recipients_of_both_related_tickets_get_notified(self):
        """To/Cc recipients"""
        ticket = self._insert_and_load_ticket(
            'Foo',
            reporter='"Joe User" < joe.user@example.org >',
            owner='joe.user@example.net',
            cc='joe.user@example.com, joe.bar@example.org, '
               'joe.bar@example.net'
        )
        ticket2 = self._insert_and_load_ticket(
            'Bar',
            reporter='"Bob User" < bob.user@example.org >',
            owner='bob.user@example.net',
            cc='bob.user@example.com, bob.bar@example.org, '
               'bob.bar@example.net')
        relation = self.add_relation(ticket, DEPENDENCY_OF, ticket2)
        self.notifier.notify(relation)
        recipients = self.smtpd.get_recipients()
        # checks there is no duplicate in the recipient list
        rcpts = []
        for r in recipients:
            self.assertNotIn(r, rcpts)
            rcpts.append(r)
        # checks that all cc recipients have been notified
        cc_list = self.env.config.get('notification', 'smtp_always_cc')
        cc_list = "%s, %s, %s" % (cc_list, ticket['cc'], ticket2['cc'])
        for r in cc_list.replace(',', ' ').split():
            self.assertIn(r, recipients)
        # checks that both owners have been notified
        self.assertIn(smtp_address(ticket['owner']), recipients)
        self.assertIn(smtp_address(ticket2['owner']), recipients)
        # checks that both reporters have been notified
        self.assertIn(smtp_address(ticket['reporter']), recipients)
        self.assertIn(smtp_address(ticket2['reporter']), recipients)

    def test_no_recipient_results_in_no_notification(self):
        self.env.config.set('notification', 'smtp_always_cc', '')
        ticket = self._insert_and_load_ticket('Foo', reporter='anonymous')
        ticket2 = self._insert_and_load_ticket('Bar', reporter='anonymous')
        relation = self.add_relation(ticket, DEPENDENCY_OF, ticket2)
        self.notifier.notify(relation)
        sender = self.smtpd.get_sender()
        recipients = self.smtpd.get_recipients()
        message = self.smtpd.get_message()
        # checks that no message has been sent
        self.assertFalse(recipients)
        self.assertFalse(sender)
        self.assertFalse(message)

    def test_one_email_per_relation(self):
        ticket = self._insert_and_load_ticket('Foo', reporter='anonymous')
        ticket2 = self._insert_and_load_ticket('Bar', reporter='anonymous')
        relation = self.add_relation(ticket, DEPENDENCY_OF, ticket2)
        self.notifier.notify(relation)
        # A relation is stored as two directed rows, but only one email
        # must be sent for it.
        relations = self.env.db_direct_query(
            "SELECT * FROM bloodhound_relations")
        self.assertEqual(len(relations), 2)
        self.assertEqual(self.smtpd.messages_received(), 1)
        self.smtpd.cleanup()
        self.relations_system.delete(relation.get_relation_id())
        relations = self.env.db_direct_query(
            "SELECT * FROM bloodhound_relations")
        self.assertEqual(len(relations), 0)
        self.assertEqual(self.smtpd.messages_received(), 1)
class CustomSMTPServerStore(SMTPServerStore):
    """SMTPServerStore that can count received messages"""

    def __init__(self):
        SMTPServerStore.__init__(self)
        # Number of messages seen so far; reset through
        # CustomSMTPThreadedServer.cleanup().
        self.messages = 0

    def helo(self, args):
        # NOTE(review): incremented on HELO, i.e. once per SMTP session.
        # This equals the message count only if each session delivers
        # exactly one message -- confirm against the trac test server.
        SMTPServerStore.helo(self, args)
        self.messages += 1
class CustomSMTPThreadedServer(SMTPThreadedServer):
    """SMTPThreadedServer backed by the counting store above."""

    def __init__(self, port):
        SMTPThreadedServer.__init__(self, port)
        # Replace the default store with the message-counting variant.
        self.store = CustomSMTPServerStore()

    def cleanup(self):
        # Also reset the message counter along with the base cleanup.
        SMTPThreadedServer.cleanup(self)
        self.store.messages = 0

    def messages_received(self):
        """Return how many messages the store has counted so far."""
        return self.store.messages
def suite():
    """Build the notification test suite."""
    tests = unittest.TestSuite()
    tests.addTest(unittest.makeSuite(NotificationTestCase))
    return tests


if __name__ == '__main__':
    unittest.main()
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/bloodhound_relations/bhrelations/tests/base.py | bloodhound_relations/bhrelations/tests/base.py | # -*- coding: UTF-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from tests.env import MultiproductTestCase
from trac.test import EnvironmentStub, Mock, MockPerm
from trac.ticket import Ticket
from trac.util.datefmt import utc
from multiproduct.env import ProductEnvironment
from bhrelations.api import EnvironmentSetup, RelationsSystem, \
RELATIONS_CONFIG_NAME
# babel is optional; fall back to no locale when it is unavailable.
try:
    from babel import Locale

    locale_en = Locale.parse('en_US')
except ImportError:
    locale_en = None

# Relation type names used throughout the bhrelations test suite.
# They match the pairs registered in BaseRelationsTestCase.setUp().
PARENT = "parent"
CHILD = "child"
REFERS_TO = "refersto"
DEPENDS_ON = "dependson"
DEPENDENCY_OF = "dependent"
DUPLICATE_OF = "duplicateof"
DUPLICATED_BY = "duplicatedby"
BLOCKED_BY = "blockedby"
BLOCKS = "blocks"
MULTIPRODUCT_REL = "mprel"
MULTIPRODUCT_BACKREL = "mpbackrel"
class BaseRelationsTestCase(MultiproductTestCase):
    """Shared fixture for bhrelations tests.

    Builds a stub global environment with trac, multiproduct and
    bhrelations enabled, registers all relation types the tests rely on,
    creates a product environment and a mock request, and provides small
    helpers for inserting tickets and manipulating relations.
    """

    def setUp(self, enabled=()):
        # `enabled`: extra component patterns to activate on top of the
        # trac/multiproduct/bhrelations defaults.
        env = EnvironmentStub(
            default_data=True,
            enable=(['trac.*', 'multiproduct.*', 'bhrelations.*'] +
                    list(enabled))
        )
        env.config.set('bhrelations', 'global_validators',
                       'NoSelfReferenceValidator,ExclusiveValidator,'
                       'BlockerValidator')
        env.config.set('bhrelations', 'duplicate_relation',
                       'duplicateof')
        config_name = RELATIONS_CONFIG_NAME
        # dependson/dependent pair, validated by NoCycles,SingleProduct.
        env.config.set(config_name, 'dependency',
                       ','.join([DEPENDS_ON, DEPENDENCY_OF]))
        env.config.set(config_name, 'dependency.validators',
                       'NoCycles,SingleProduct')
        env.config.set(config_name, 'dependson.blocks', 'true')
        # parent/child pair, one-to-many, exclusive on the parent side.
        env.config.set(config_name, 'parent_children',
                       ','.join([PARENT, CHILD]))
        env.config.set(config_name, 'parent_children.validators',
                       'OneToMany,SingleProduct,NoCycles')
        env.config.set(config_name, 'children.label', 'Overridden')
        env.config.set(config_name, 'parent.copy_fields',
                       'summary, foo')
        env.config.set(config_name, 'parent.exclusive', 'true')
        # pair without a SingleProduct validator, so it may span products.
        env.config.set(config_name, 'multiproduct_relation',
                       ','.join([MULTIPRODUCT_REL, MULTIPRODUCT_BACKREL]))
        # one-way reference without a backward name.
        env.config.set(config_name, 'oneway', REFERS_TO)
        # duplicateof/duplicatedby pair, validated by ReferencesOlder.
        env.config.set(config_name, 'duplicate',
                       ','.join([DUPLICATE_OF, DUPLICATED_BY]))
        env.config.set(config_name, 'duplicate.validators', 'ReferencesOlder')
        env.config.set(config_name, 'duplicateof.label', 'is a duplicate of')
        env.config.set(config_name, 'duplicatedby.label', 'duplicates')
        env.config.set(config_name, 'blocker', ','.join([BLOCKED_BY, BLOCKS]))
        env.config.set(config_name, 'blockedby.blocks', 'true')
        self.global_env = env
        self._upgrade_mp(self.global_env)
        self._setup_test_log(self.global_env)
        self._load_product_from_data(self.global_env, self.default_product)
        self.env = ProductEnvironment(self.global_env, self.default_product)
        self.req = Mock(href=self.env.href, authname='anonymous', tz=utc,
                        args=dict(action='dummy'),
                        locale=locale_en, lc_time=locale_en,
                        chrome={'warnings': []})
        self.req.perm = MockPerm()
        self.relations_system = RelationsSystem(self.env)
        self._upgrade_env()

    def tearDown(self):
        self.global_env.reset_db()

    def _upgrade_env(self):
        # Create the bhrelations schema in the product environment.
        environment_setup = EnvironmentSetup(self.env)
        try:
            environment_setup.upgrade_environment(self.env.db_transaction)
        except self.env.db_exc.OperationalError:
            # table remains but database version is deleted
            pass

    @classmethod
    def _insert_ticket(cls, env, summary, **kw):
        """Helper for inserting a ticket into the database"""
        ticket = Ticket(env)
        ticket["summary"] = summary
        for k, v in kw.items():
            ticket[k] = v
        return ticket.insert()

    def _insert_and_load_ticket(self, summary, **kw):
        # Insert into self.env and return the loaded Ticket object.
        return Ticket(self.env, self._insert_ticket(self.env, summary, **kw))

    def _insert_and_load_ticket_with_env(self, env, summary, **kw):
        # Same as above, but against an arbitrary (product) environment.
        return Ticket(env, self._insert_ticket(env, summary, **kw))

    def add_relation(self, source, reltype, destination, *args, **kwargs):
        # Note the argument order: RelationsSystem.add() takes
        # (source, destination, reltype).
        return self.relations_system.add(source, destination, reltype,
                                         *args, **kwargs)

    def get_relations(self, ticket):
        return self.relations_system.get_relations(ticket)

    def delete_relation(self, relation):
        self.relations_system.delete(relation["relation_id"])
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/bloodhound_relations/bhrelations/tests/mocks.py | bloodhound_relations/bhrelations/tests/mocks.py | # -*- coding: UTF-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from trac.core import Component, implements
from bhrelations.api import IRelationChangingListener
class TestRelationChangingListener(Component):
    """Test double that records the most recent relation-changing event.

    Fix: class-level defaults so tests can read ``action``/``relation``/
    ``when`` before any event has fired without hitting AttributeError.
    """
    implements(IRelationChangingListener)

    # Defaults until an event is recorded.
    action = None
    relation = None
    when = None

    def adding_relation(self, relation):
        # Remember that an add event occurred and which relation it was.
        self.action = "adding_relation"
        self.relation = relation

    def deleting_relation(self, relation, when):
        # Remember the delete event, the relation, and its timestamp.
        self.action = "deleting_relation"
        self.relation = relation
        self.when = when
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/bloodhound_relations/bhrelations/utils/translation.py | bloodhound_relations/bhrelations/utils/translation.py | # -*- coding: UTF-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
r"""Project dashboard for Apache(TM) Bloodhound
Translation functions and classes.
"""
from trac.util.translation import domain_functions
#------------------------------------------------------
# Internationalization
#------------------------------------------------------
_, ngettext, tag_, tagn_, gettext, N_, add_domain = \
domain_functions('bhrelations', ('_', 'ngettext', 'tag_', 'tagn_',
'gettext', 'N_', 'add_domain'))
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/bloodhound_relations/bhrelations/utils/__init__.py | bloodhound_relations/bhrelations/utils/__init__.py | # -*- coding: UTF-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# Copied from trac/utils.py, ticket-links-trunk branch
def unique(seq):
    """Yield unique elements from sequence of hashables, preserving order.

    (New in 0.13)
    """
    seen = set()
    for item in seq:
        if item not in seen:
            seen.add(item)
            yield item
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/bloodhound_relations/bhrelations/widgets/__init__.py | bloodhound_relations/bhrelations/widgets/__init__.py | # -*- coding: UTF-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/bloodhound_relations/bhrelations/widgets/relations.py | bloodhound_relations/bhrelations/widgets/relations.py | # -*- coding: UTF-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
r"""Project dashboard for Apache(TM) Bloodhound
Widgets displaying ticket relations.
"""
from trac.resource import get_resource_shortname, get_resource_summary
from trac.ticket.model import Ticket
from bhdashboard.util import pretty_wrapper
from bhdashboard.util.widgets import WidgetBase, check_widget_name
from bhrelations.web_ui import RelationManagementModule
from bhrelations.utils.translation import _
__metaclass__ = type
class TicketRelationsWidget(WidgetBase):
    """Display ticket relations.
    """
    def get_widget_params(self, name):
        """Return a dictionary containing arguments specification for
        the widget with specified name.
        """
        return {
            'tid': {
                'desc': """Source ticket id""",
                'type': int
            },
            'max': {
                'desc': """Limit the number of relations displayed""",
                'type': int
            },
        }

    get_widget_params = pretty_wrapper(get_widget_params, check_widget_name)

    def render_widget(self, name, context, options):
        """Gather list of relations and render data in compact view
        """
        title = _('Related tickets')
        params = ('tid', 'max')
        tid, max_ = self.bind_params(name, options, *params)
        # NOTE(review): max_ is bound but never applied below, so the
        # advertised 'max' limit currently has no effect -- confirm.
        ticket = Ticket(self.env, tid)
        data = {
            'ticket': ticket,
            # Relations are looked up through the web module's helper so
            # the widget shows the same data as the ticket page.
            'relations': \
                RelationManagementModule(self.env).get_ticket_relations(ticket),
            'get_resource_shortname': get_resource_shortname,
            'get_resource_summary': get_resource_summary,
        }
        return 'widget_relations.html', {
            'title': title,
            'data': data,
        }, context

    render_widget = pretty_wrapper(render_widget, check_widget_name)
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/bloodhound_dashboard/setup.py | bloodhound_dashboard/setup.py | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import sys
from setuptools import setup
# Long description for the package; its first line doubles as the short
# description (see the `setup()` call at the bottom of this script).
DESC = """Project dashboard for Apache(TM) Bloodhound.
Add custom dashboards in multiple pages of Bloodhound sites.
"""
# Release history as (major, minor, patch) tuples; the last entry is the
# version being packaged.
versions = [
    (0, 1, 0),
    (0, 2, 0),
    (0, 3, 0),
    (0, 4, 0),
    (0, 5, 0),
    (0, 6, 0),
    (0, 7, 0),
    (0, 8, 0),
    (0, 9, 0),
    ]
latest = '.'.join(str(x) for x in versions[-1])
# Trove classifier string for each development stage.
status = {
           'planning' :  "Development Status :: 1 - Planning",
           'pre-alpha' : "Development Status :: 2 - Pre-Alpha",
           'alpha' :     "Development Status :: 3 - Alpha",
           'beta' :      "Development Status :: 4 - Beta",
           'stable' :    "Development Status :: 5 - Production/Stable",
           'mature' :    "Development Status :: 6 - Mature",
           'inactive' :  "Development Status :: 7 - Inactive"
         }
dev_status = status["alpha"]
# PyPI (Trove) classifiers advertised for this distribution.
cats = [
      dev_status,
      "Environment :: Plugins",
      "Environment :: Web Environment",
      "Framework :: Trac",
      "Intended Audience :: Developers",
      "Intended Audience :: Information Technology",
      "Intended Audience :: Other Audience",
      "Intended Audience :: System Administrators",
      "License :: Unknown",
      "Operating System :: OS Independent",
      "Programming Language :: Python",
      "Programming Language :: Python :: 2.5",
      "Programming Language :: Python :: 2.6",
      "Programming Language :: Python :: 2.7",
      "Topic :: Internet :: WWW/HTTP :: Dynamic Content :: CGI Tools/Libraries",
      "Topic :: Internet :: WWW/HTTP :: HTTP Servers",
      "Topic :: Internet :: WWW/HTTP :: WSGI",
      "Topic :: Software Development :: Bug Tracking",
      "Topic :: Software Development :: Libraries :: Application Frameworks",
      "Topic :: Software Development :: Libraries :: Python Modules",
      "Topic :: Software Development :: User Interfaces",
      "Topic :: Software Development :: Widget Sets"
    ]
# Add the change log to the package description.
chglog = None
try:
    from os.path import dirname, join
    chglog = open(join(dirname(__file__), "CHANGES"))
    DESC += ('\n\n' + chglog.read())
finally:
    # NOTE(review): there is no `except` clause, so a missing CHANGES file
    # aborts the build with IOError — confirm that is intended.
    if chglog:
        chglog.close()
DIST_NM = 'BloodhoundDashboardPlugin'
# Maps package name -> (package directory, package data globs).
PKG_INFO = {
    'bhdashboard': ('bhdashboard',  # Package dir
                    ['../CHANGES', '../TODO', '../COPYRIGHT',  # Package data
                     '../NOTICE', '../README', '../TESTING_README',
                     'htdocs/*.*', 'htdocs/css/*.css',
                     'htdocs/img/*.*', 'htdocs/js/*.js',
                     'templates/*', 'default-pages/*',
                     'locale/*/LC_MESSAGES/*.mo'],
                    ),
    'bhdashboard.widgets': ('bhdashboard/widgets',  # Package dir
                            ['templates/*', 'htdocs/*.css']),  # Package data
    'bhdashboard.layouts': ('bhdashboard/layouts',  # Package dir
                            ['templates/*']),  # Package data
    'bhdashboard.tests': ('bhdashboard/tests',  # Package dir
                          ['data/**']),  # Package data
    'bhdashboard.util': ('bhdashboard/util',  # Package dir
                         []),  # Package data
    }
# Entry points registering every bhdashboard module as a Trac plugin.
ENTRY_POINTS = r"""
[trac.plugins]
bhdashboard.admin = bhdashboard.admin
bhdashboard.api = bhdashboard.api
bhdashboard.macros = bhdashboard.macros
bhdashboard.layouts.bootstrap = bhdashboard.layouts.bootstrap
bhdashboard.widgets.containers = bhdashboard.widgets.containers
bhdashboard.widgets.product = bhdashboard.widgets.product
bhdashboard.widgets.query = bhdashboard.widgets.query
bhdashboard.widgets.report = bhdashboard.widgets.report
bhdashboard.widgets.ticket = bhdashboard.widgets.ticket
bhdashboard.widgets.timeline = bhdashboard.widgets.timeline
bhdashboard.wiki = bhdashboard.wiki
"""
# Optional i18n support: when Trac's message-extraction machinery is
# available, register the Babel command classes and extractors; otherwise
# install without localization tooling.
extra = {}
try:
    from trac.util.dist import get_l10n_cmdclass
    cmdclass = get_l10n_cmdclass()
    if cmdclass:
        extra['cmdclass'] = cmdclass
        extractors = [
            ('**.py', 'trac.dist:extract_python', None),
            ('**/templates/**.html', 'genshi', None),
            ('**/templates/**.txt', 'genshi', {
                'template_class': 'genshi.template:TextTemplate'
            }),
        ]
        extra['message_extractors'] = {
            'bhdashboard': extractors,
        }
except ImportError:
    pass
setup(
    name=DIST_NM,
    version=latest,
    description=DESC.split('\n', 1)[0],
    author="Apache Bloodhound",
    license="Apache License v2",
    url="https://bloodhound.apache.org/",
    requires=['trac'],
    # FIX: parenthesize the conditional expression.  The ternary binds
    # more loosely than `+`, so the previous form evaluated as
    # `(base + ['unittest2']) if py < 2.7 else []`, silently dropping
    # *all* test requirements on Python >= 2.7.
    tests_require=['dutest>=0.2.4', 'TracXMLRPC'] +
                  (['unittest2'] if sys.version_info < (2, 7) else []),
    package_dir=dict([p, i[0]] for p, i in PKG_INFO.iteritems()),
    packages=PKG_INFO.keys(),
    package_data=dict([p, i[1]] for p, i in PKG_INFO.iteritems()),
    include_package_data=True,
    provides=['%s (%s)' % (p, latest) for p in PKG_INFO.keys()],
    obsoletes=['%s (>=%s.0.0, <%s)' % (p, versions[-1][0], latest)
               for p in PKG_INFO.keys()],
    entry_points=ENTRY_POINTS,
    classifiers=cats,
    long_description=DESC,
    **extra
)
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/bloodhound_dashboard/bhdashboard/api.py | bloodhound_dashboard/bhdashboard/api.py | # -*- coding: UTF-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
r"""Project dashboard for Apache(TM) Bloodhound
The core of the dashboard architecture.
"""
# Make all classes defined in this module new-style on Python 2.
__metaclass__ = type
from datetime import date, time, datetime, timedelta
from inspect import isclass
from sys import version_info
from genshi.builder import tag
from trac.core import Component, ExtensionPoint, implements, \
Interface, TracError
from trac.perm import IPermissionRequestor
from trac.resource import get_resource_url, Resource, resource_exists
from trac.util.compat import set
from trac.util.datefmt import parse_date
from trac.web.chrome import add_stylesheet
from bhdashboard.util.translation import _
#--------------------------------------
# Core classes and interfaces
#--------------------------------------
class IWidgetProvider(Interface):
    r"""Extension point interface for components providing widgets.

    These may be seen as web parts more sophisticated than WikiMacro
    as they expose much more meta-data, but more simple than gadgets
    because they belong in the environment and are built on top of Trac
    architecture. This makes them more suitable to be used in
    environments where flexibility and configurability is needed
    (i.e. dashboards).
    """
    def get_widgets():
        """Return an iterable listing the names of the provided widgets."""
    def get_widget_description(name):
        """Return plain text description of the widget with specified name."""
    def get_widget_params(name):
        """Return a dictionary describing widget preferences for the widget
        with specified name. Used to customize widget behavior."""
    def render_widget(name, context, options):
        """Render widget considering given options.

        Implementations in this module return a
        `(template_name, data, context)` triple.
        """
    # TODO: Add methods to specify widget metadata (e.g. parameters)
class ILayoutProvider(Interface):
    """Extension point interface implemented by components adding layouts
    to the dashboard.

    PS: Such components should implement `trac.mimeview.api.IContentConverter`
    interface so as to save and load layout definition when necessary.
    The pseudo-mimetype identifying layout data will be
    `application/x-trac-layout-<layout_name>`.
    Nonetheless they can delegate that task to other components too.
    Let's all hail the Trac component model !
    """
    def get_layouts():
        """Return an iterable listing the names of the provided layouts."""
    def get_layout_description(name):
        """Return plain text description of the layout with specified name."""
    def expand_layout(name, context, options):
        """Provide the information needed to render layout identified by
        `name`.

        :param context: rendering context
        :param options: additional options supplied in so as to adapt layout
                considering data specific to this request. This allows to
                customize (parts of) the layout for a given request.
                Supported options are :

                :field schema: data to be used in order to populate layout
                :field embed: embed layout inside another page (true / false)
        """
class DashboardSystem(Component):
    """Core component of the dashboard infrastructure.

    Registers dashboard permissions, resolves widget / layout names to
    their providers, and itself provides the built-in `WidgetDoc` widget
    rendering documentation for every registered widget.
    """
    implements(IPermissionRequestor, IWidgetProvider)
    widget_providers = ExtensionPoint(IWidgetProvider)
    layout_providers = ExtensionPoint(ILayoutProvider)
    # IPermissionRequestor methods
    def get_permission_actions(self):
        """Return the permission actions introduced by the dashboard."""
        return ['DASHBOARD_VIEW',
                # 'DASHBOARD_CREATE', 'DASHBOARD_EDIT' <= Coming soon ;)
                ]
    # IWidgetProvider methods
    def get_widgets(self):
        """List the name of the widgets that will always be available
        """
        yield 'WidgetDoc'
    def get_widget_description(self, name):
        """Return plain text description of the widget with specified name.

        :raises InvalidIdentifier: if `name` is not a known widget
        """
        try:
            return {
                    'WidgetDoc' : """Display widget documentation"""
                }[name]
        except KeyError:
            raise InvalidIdentifier('Widget name MUST match any of ' +
                        ', '.join(self.get_widgets()),
                    title='Invalid widget identifier')
    def get_widget_params(self, name):
        """Return a dictionary describing widget preferences for the widget
        with specified name. Used to customize widget behavior.

        :raises InvalidIdentifier: if `name` is not a known widget
        """
        try:
            return {
                    'WidgetDoc' : {
                            'urn' : {
                                    'desc' : """Widget name. If missing """
                                            """documentation for all """
                                            """widgets will be displayed."""
                                }
                        }
                }[name]
        except KeyError:
            raise InvalidIdentifier('Widget name MUST match any of ' +
                        ', '.join(self.get_widgets()),
                    title='Invalid widget identifier')
    def render_widget(self, name, context, options):
        """Render widget considering given options.

        For `WidgetDoc`: if the `urn` argument names a single widget its
        documentation alone is rendered, otherwise documentation for every
        widget of every registered provider is shown.
        """
        if name == 'WidgetDoc':
            add_stylesheet(context.req, 'dashboard/css/docs.css')
            widget_name, = self.bind_params(options,
                                            self.get_widget_params(name), 'urn')
            if widget_name is not None:
                try:
                    providers = [([widget_name],
                                  self.resolve_widget(widget_name))]
                except LookupError:
                    # Unknown widget name: degrade to an alert box.
                    return 'widget_alert.html', {
                            'title' : _('Widget documentation'),
                            'data' : {
                                    'msglabel': 'Alert',
                                    'msgbody': 'Unknown identifier',
                                    'msgdetails': [
                                            ('Widget name', widget_name)
                                        ]
                                }
                        }, context
            else:
                providers = [(provider.get_widgets(), provider) \
                        for provider in self.widget_providers]
            metadata = [self._prepare_doc_metadata(self.widget_metadata(wnm, p)) \
                    for widgets, p in providers for wnm in widgets]
            docs_resource = Resource('wiki', 'BloodhoundWidgets')
            # Only link to the help page when it exists and we are not
            # already rendering on that very page.
            insert_docs = resource_exists(self.env, docs_resource) and \
                    not (context.resource and \
                    docs_resource == context.resource)
            return 'widget_doc.html', {
                    'title' : _('Widget documentation'),
                    'data' : {
                            'items' : metadata
                        },
                    'ctxtnav' : [tag.a(tag.i(class_='icon-info-sign'),
                                       ' ', _('Help'),
                                       href=get_resource_url(
                                                self.env, docs_resource,
                                                context.href)
                                       )] if insert_docs else [],
                }, context
        else:
            raise InvalidIdentifier('Widget name MUST match any of ' +
                        ', '.join(self.get_widgets()),
                    title='Invalid widget identifier')
    # Public API
    def widget_metadata(self, nm, provider=None):
        """Retrieve widget metadata.

        :param nm: Widget name
        :param provider: Widget provider. If omitted it will be resolved.
        """
        if provider is None:
            provider = self.resolve_widget(nm)
        return {
                'urn' : nm,
                'desc' : provider.get_widget_description(nm),
                'params' : provider.get_widget_params(nm),
            }
    def _prepare_doc_metadata(self, spec):
        """Transform widget metadata into a format suitable to render
        documentation.
        """
        def plabel(p):
            # Human-readable label for a parameter type; non-builtin types
            # carry their module name in a tooltip.
            v = p.get('type', str)
            module = getattr(v, '__module__', None)
            if module in (None, '__builtin__'):
                return getattr(v, '__name__', None) or v
            else:
                # FIXME: Improve e.g. for enum fields
                if not isclass(v):
                    v = v.__class__
                return tag.span(v.__name__, title='in ' + module)
        return {
                'title' : spec['urn'],
                'desc' : '\n'.join(l.strip()
                                   for l in spec['desc'].splitlines()),
                'sections' : [
                        {
                            'title' : _('Parameters'),
                            'entries' : [
                                    {
                                        'caption' : pnm,
                                        'summary' : '\n'.join(
                                                l.strip() for l in \
                                                p.get('desc').splitlines()),
                                        'details' : [
                                                ('Type', plabel(p)),
                                                ('Required', p.get('required',
                                                                   False)),
                                                ('Default', p.get('default')),
                                            ]
                                    }
                                for pnm, p in spec['params'].iteritems()]
                        }
                    ]
            }
    def bind_params(self, options, spec, *params):
        """Extract values for widget arguments from `options` and ensure
        they are valid and properly formatted.

        Returns a generator yielding one (converted) value per requested
        parameter name, in order.
        """
        # Should this helper function be part of public API ?
        def get_and_check(p):
            try:
                param_spec = spec[p]
            except KeyError:
                # NOTE(review): `InvalidWidgetArgument.__init__` requires
                # (argname, message); this single-argument call would raise
                # TypeError instead — confirm and fix upstream.
                raise InvalidWidgetArgument("Unknown parameter `%s`" % (p,))
            try:
                argtype = param_spec.get('type') or unicode
                return argtype(options['args'][p])
            except KeyError:
                if param_spec.get('required'):
                    raise InvalidWidgetArgument(p,
                            "Required parameter expected")
                elif param_spec.get('default') is not None:
                    return param_spec['default']
                else:
                    return None
        return (get_and_check(param) for param in params)
    def _resolve(self, objnm, epnm, accessor, errmsg='Invalid object name %s'):
        """Determine the provider implementing a given widget / layout / ...

        :param objnm: name used to lookup provider
        :param epnm: attribute name used for entry point
        :param accessor: function used to determine names bound to provider
        :raises LookupError: if no registered provider exposes `objnm`
        """
        for p in getattr(self, epnm):
            if objnm in accessor(self, p):
                return p
        else:
            raise LookupError(errmsg % (objnm,))
    def resolve_layout(self, nm):
        """Return the provider exposing layout `nm` (or raise LookupError)."""
        return self._resolve(nm, 'layout_providers',
                lambda _, lp : lp.get_layouts() , "No provider for layout %s")
    def resolve_widget(self, nm):
        """Return the provider exposing widget `nm` (or raise LookupError)."""
        return self._resolve(nm, 'widget_providers',
                lambda _, wp : wp.get_widgets() , "No provider for widget %s")
#--------------------------------------
# Exception classes
#--------------------------------------
# Maybe it is better to move these to a separate file
# (if this gets as big as it seems it will be)
class WidgetException(TracError):
    """Base class for all errors related to Trac widgets"""
class InvalidIdentifier(WidgetException):
    """Invalid value for a field used to identify an internal object"""
    # Default error title shown by Trac's error page.
    title = 'Invalid identifier'
class InvalidWidgetArgument(WidgetException):
    """Raised when a widget parameter is missing or malformed."""
    # Default error title shown when no explicit one is supplied.
    title = 'Invalid Argument'
    def __init__(self, argname, message, title=None, show_traceback=False):
        """Prefix `message` with the offending argument name and delegate
        to `TracError`.
        """
        full_message = "%s `%s`. %s" % (_("Invalid argument"),
                                        argname, message)
        TracError.__init__(self, full_message, title, show_traceback)
        self.argname = argname
    def __unicode__(self):
        return unicode(self.message)
#--------------------------------------
# Default field types
#--------------------------------------
class DateField:
"""Convert datetime field
"""
def __init__(self, fmt="%Y-%m-%d %H:%M:%S", tz=None):
"""Initialize datetime field converter
:param fmt: format string used to interpret dates and times
"""
self.fmt = fmt
self.tz = tz
def __call__(self, value, fmt=None):
"""Perform the actual conversion
"""
if isinstance(value, (date, time, datetime, timedelta)):
return value
elif isinstance(value, basestring):
try:
return parse_date(value, self.tz)
except TracError, exc:
try:
fmt = fmt or self.fmt
return datetime.strptime(value, fmt)
except:
raise InvalidWidgetArgument(
error=exc, title=_('Datetime conversion error'))
elif isinstance(value, int):
return datetime.utcfromtimestamp(value)
else:
raise InvalidWidgetArgument(
"Invalid format `%s` for value `%s`" % (fmt, value),
title=_('Datetime conversion error'))
class ListField:
"""Convert list field
"""
def __init__(self, sep=','):
"""Initialize list field converter
:param sep: character used to delimit list items
"""
self.sep = sep
def __call__(self, value):
"""Perform the actual conversion
"""
if isinstance(value, basestring):
return value.split(self.sep)
else:
try:
return list(value)
except Exception, exc:
raise InvalidWidgetArgument(error=exc,
title=_('List conversion error'))
class EnumField:
    """Validate that a value belongs to a fixed set of choices.
    """
    def __init__(self, *choices):
        """Initialize enum field converter

        :param choices: allowed values
        """
        self.choices = set(choices)
    def __call__(self, value):
        """Return `value` unchanged when it is an allowed choice,
        otherwise raise `InvalidWidgetArgument`.
        """
        if value in self.choices:
            return value
        raise InvalidWidgetArgument('',
            _('Expected one of `%s` but got `%s`') % (self.choices, value),
            title=_('Enum conversion error'))
class JsonField:
    """Deserialize a JSON string into the equivalent Python object.
    """
    def __init__(self):
        """Initialize JSON field converter
        """
        # TODO: Add further options
    def __call__(self, value):
        """Parse `value` as JSON and return the resulting object.

        :raises TracError: if no JSON parser can be imported
        """
        try:
            # Python < 2.6 ships no stdlib `json`; fall back to simplejson.
            if version_info >= (2, 6):
                from json import loads
            else:
                from simplejson import loads
        except ImportError:
            raise TracError('Unable to load library to parse JSON string')
        return loads(value)
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/bloodhound_dashboard/bhdashboard/web_ui.py | bloodhound_dashboard/bhdashboard/web_ui.py | # -*- coding: UTF-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
r"""Project dashboard for Apache(TM) Bloodhound
Implementing dashboard user interface.
"""
# Make all classes defined in this module new-style on Python 2.
__metaclass__ = type
import copy
import pkg_resources
import re
from uuid import uuid4
from genshi.builder import tag
from genshi.core import Stream
from trac.core import Component, implements
from trac.config import Option, IntOption
from trac.mimeview.api import Context
from trac.ticket.query import QueryModule
from trac.ticket.report import ReportModule
from trac.util.compat import groupby
from trac.web.api import IRequestHandler, IRequestFilter
from trac.web.chrome import add_ctxtnav, add_stylesheet, Chrome, \
INavigationContributor, ITemplateProvider
from bhdashboard.api import DashboardSystem, InvalidIdentifier
from bhdashboard import _json
from bhdashboard.util.translation import _, add_domain
from multiproduct.env import ProductEnvironment
class DashboardModule(Component):
"""Web frontend for dashboard infrastructure.
"""
implements(IRequestHandler, IRequestFilter, INavigationContributor,
ITemplateProvider)
mainnav_label = Option('mainnav', 'tickets.label', 'Tickets',
"""Dashboard label in mainnav""",
doc_domain='bhdashboard')
default_widget_height = IntOption('widgets', 'default_height', 320,
"""Default widget height in pixels""",
doc_domain='bhdashboard')
def __init__(self, *args, **kwargs):
locale_dir = pkg_resources.resource_filename(__name__, 'locale')
add_domain(self.env.path, locale_dir)
super(DashboardModule, self).__init__(*args, **kwargs)
# IRequestFilter methods
def pre_process_request(self, req, handler):
"""Always returns the request handler unchanged.
"""
return handler
def post_process_request(self, req, template, data, content_type):
"""Inject dashboard helpers in data.
"""
if data is not None:
data['bhdb'] = DashboardChrome(self.env)
if isinstance(req.perm.env, ProductEnvironment) \
and not 'resourcepath_template' in data \
and 'product_list' in data:
data['resourcepath_template'] = 'bh_path_general.html'
for item in req.chrome['nav'].get('mainnav', []):
self.log.debug('%s' % (item,))
if item['name'] == 'tickets':
item['label'] = tag.a(_(self.mainnav_label),
href=req.href.dashboard())
if item['active'] and \
not ReportModule(self.env).match_request(req):
add_ctxtnav(req, _('Reports'),
href=req.href.report())
break
return template, data, content_type
# IRequestHandler methods
def match_request(self, req):
"""Match dashboard prefix"""
return bool(re.match(r'^/dashboard(/.)?', req.path_info))
def process_request(self, req):
req.perm.require('PRODUCT_VIEW')
# Initially this will render static widgets. With time it will be
# more and more dynamic and flexible.
if self.env[QueryModule] is not None:
add_ctxtnav(req, _('Custom Query'), req.href.query())
if self.env[ReportModule] is not None:
add_ctxtnav(req, _('Reports'), req.href.report())
context = Context.from_request(req)
template, layout_data = self.expand_layout_data(context,
'bootstrap_grid',
self.DASHBOARD_SCHEMA if isinstance(self.env, ProductEnvironment)
else self.DASHBOARD_GLOBAL_SCHEMA
)
widgets = self.expand_widget_data(context, layout_data)
return template, {
'context': Context.from_request(req),
'layout': layout_data,
'widgets': widgets,
'title': _(self.mainnav_label),
'default': {'height': self.default_widget_height or None}
}, None
# INavigationContributor methods
def get_active_navigation_item(self, req):
"""Highlight dashboard mainnav item.
"""
return 'tickets'
def get_navigation_items(self, req):
"""Skip silently
"""
return None
# ITemplateProvider methods
def get_htdocs_dirs(self):
"""List `htdocs` dirs for dashboard and widgets.
"""
resource_filename = pkg_resources.resource_filename
return [('dashboard', resource_filename('bhdashboard', 'htdocs')),
#('widgets', resource_filename('bhdashboard.widgets', 'htdocs'))
('layouts', resource_filename('bhdashboard.layouts', 'htdocs'))]
def get_templates_dirs(self):
"""List `templates` folders for dashboard and widgets.
"""
resource_filename = pkg_resources.resource_filename
return [resource_filename('bhdashboard.layouts', 'templates'),
resource_filename('bhdashboard', 'templates'),
resource_filename('bhdashboard.widgets', 'templates')]
# Temp vars
DASHBOARD_GLOBAL_SCHEMA = DASHBOARD_SCHEMA = {
'div': [
{
'_class': 'row',
'div': [
{
'_class': 'span8',
'widgets': ['my tickets', 'active tickets',
'products', 'versions',
'milestones', 'components']
},
{
'_class': 'span4',
'widgets': ['activity']
}
]
}
],
'widgets': {
'components': {
'args': [
'TicketFieldValues',
None,
{'args': {
'field': 'component',
'title': 'Components',
'verbose': True}}]
},
'milestones': {
'args': [
'TicketFieldValues',
None,
{'args': {
'field': 'milestone',
'title': 'Milestones',
'verbose': True}}]
},
'versions': {
'args': [
'TicketFieldValues',
None,
{'args' : {
'field' : 'version',
'title' : 'Versions',
'verbose' : True}}]
},
'active tickets': {
'args': [
'TicketQuery',
None,
{'args': {
'max' : 10,
'query': 'status=!closed&group=milestone'
'&col=id&col=summary&col=owner'
'&col=status&col=priority&'
'order=priority',
'title': _('Active Tickets')}}],
'altlinks': False
},
'my tickets': {
'args': [
'TicketQuery',
None,
{'args': {
'max': 10,
'query': 'status=!closed&group=milestone'
'&col=id&col=summary&col=owner'
'&col=status&col=priority&'
'order=priority&'
'owner=$USER',
'title': _('My Tickets')}
}],
'altlinks': False
},
'activity': {
'args': ['Timeline', None, {'args': {}}]
},
'products': {
'args': ['Product', None, {'args': {'max': 3,
'cols': 2}}]
},
}
}
# global dashboard queries: add milestone column, group by product
DASHBOARD_GLOBAL_SCHEMA['widgets']['active tickets']['args'][2]['args']['query'] = (
'status=!closed&group=product&col=id&col=summary&col=owner&col=status&'
'col=priority&order=priority&col=milestone'
)
DASHBOARD_GLOBAL_SCHEMA['widgets']['my tickets']['args'][2]['args']['query'] = (
'status=!closed&group=product&col=id&col=summary&col=owner&col=status&'
'col=priority&order=priority&col=milestone&owner=$USER&'
)
for widget in ('milestones', 'versions', 'components'):
DASHBOARD_GLOBAL_SCHEMA['div'][0]['div'][0]['widgets'].remove(widget)
# Public API
def expand_layout_data(self, context, layout_name, schema, embed=False):
"""Determine the template needed to render a specific layout
and the data needed to place the widgets at expected
location.
"""
layout = DashboardSystem(self.env).resolve_layout(layout_name)
template = layout.expand_layout(layout_name, context, {
'schema': schema,
'embed': embed
})['template']
return template, schema
def _render_widget(self, wp, name, ctx, options):
"""Render widget without failing.
"""
if wp is None:
data = {'msglabel': _('Warning'),
'msgbody': _('Unknown widget %(name)s', name=name)}
return 'widget_alert.html', {'title': '', 'data': data}, ctx
try:
return wp.render_widget(name, ctx, options)
except Exception, exc:
log_entry = str(uuid4())
exccls = exc.__class__
self.log.exception(
"- %s - Error rendering widget %s with options %s",
log_entry, name, options)
data = {
'msgtype': 'error',
'msglabel': 'Error',
'msgbody': _('Exception raised while rendering widget. '
'Contact your administrator for further details.'),
'msgdetails': [
('Widget name', name),
('Exception type', tag.code(exccls.__name__)),
('Log entry ID', log_entry),
],
}
return 'widget_alert.html', {
'title': _('Widget error'),
'data': data
}, ctx
def expand_widget_data(self, context, schema):
"""Expand raw widget data and format it for use in template
"""
# TODO: Implement dynamic dashboard specification
widgets_spec = schema.get('widgets', {})
widgets_index = dict([wnm, wp]
for wp in DashboardSystem(self.env).widget_providers
for wnm in wp.get_widgets()
)
self.log.debug("Bloodhound: Widget index %s" % (widgets_index,))
for w in widgets_spec.itervalues():
w['c'] = widgets_index.get(w['args'][0])
w['args'][1] = context
self.log.debug("Bloodhound: Widget specs %s" % (widgets_spec,))
chrome = Chrome(self.env)
render = chrome.render_template
data_strm = ((k, w, self._render_widget(w['c'], *w['args']))
for k, w in widgets_spec.iteritems())
return dict([k, {'title': data['title'],
'content': render(wctx.req, template, data['data'],
fragment=True),
'ctxtnav': w.get('ctxtnav', True) and
data.get('ctxtnav') or None,
'altlinks': w.get('altlinks', True) and
data.get('altlinks') or None,
'visible': w['c'] is not None or
not w.get('hide_disabled', False)}
] for k, w, (template, data, wctx) in data_strm)
def alert_disabled(self):
return tag.div(tag.span('Error', class_='label label-important'),
' Could not load dashboard. Is ',
tag.code('bhdashboard.web_ui.DashboardModule'),
' component disabled ?',
class_='alert alert-error')
#------------------------------------------------------
# Dashboard Helpers to be used in templates
#------------------------------------------------------
# XML namespace URI identifying dashboard markup tags (e.g. `bh:arg`).
XMLNS_DASHBOARD_UI = 'http://issues.apache.org/bloodhound/wiki/Ui/Dashboard'
class DashboardChrome:
    """Helper functions providing access to dashboard infrastructure
    in Genshi templates. Useful to reuse layouts and widgets across
    website.
    """
    def __init__(self, env):
        self.env = env
    def embed_layout(self, context, layout, **kwargs):
        """Render layout and widgets

        :param context: Rendering context
        :param layout: Identifier of target layout
        :param schema: Data describing widget positioning (dict or JSON
                string)
        :param widgets: Widgets definition (dict or JSON string)
        """
        dbmod = DashboardModule(self.env)
        schema = kwargs.get('schema', {})
        if isinstance(schema, basestring):
            schema = _json.loads(schema)
        widgets = kwargs.get('widgets')
        if widgets is not None:
            # TODO: Use this one once widgets markup parser will be ready
            #widgets = parse_widgets_markup(widgets)
            if isinstance(widgets, basestring):
                widgets = _json.loads(widgets)
        else:
            widgets = {}
        schema['widgets'] = widgets
        template, layout_data = dbmod.expand_layout_data(context, layout,
                                                         schema, True)
        widgets = dbmod.expand_widget_data(context, layout_data)
        return Chrome(self.env).render_template(
                context.req, template,
                dict(context=context, layout=layout_data, widgets=widgets, title='',
                     default={'height': dbmod.default_widget_height or None}),
                fragment=True)
    def expand_widget(self, context, widget):
        """Render single widget

        :param context: Rendering context
        :param widget: Widget definition; its `args` option may be a dict,
                a JSON string, or a Genshi stream of `bh:arg` tags
        """
        dbmod = DashboardModule(self.env)
        options = widget['args'][2]
        argsdef = options.get('args')
        if isinstance(argsdef, basestring):
            options['args'] = _json.loads(argsdef)
        elif isinstance(argsdef, Stream):
            options['args'] = parse_args_tag(argsdef)
        return dbmod.expand_widget_data(context, {'widgets': {0: widget}})[0]
#------------------------------------------------------
# Stream processors
#------------------------------------------------------
def parse_args_tag(stream):
    """Parse Genshi markup for widget arguments.

    Collect the text content of every `<bh:arg name="...">` element (in
    the `XMLNS_DASHBOARD_UI` namespace) found in `stream`.

    :param stream: Genshi event stream
    :return: dict mapping argument name to its text value
    :raises RuntimeError: if `bh:arg` tags are nested
    """
    args = {}
    inside = False
    argnm = ''
    argvalue = ''
    # The loop variable deliberately avoids `_`, which would shadow the
    # translation function imported at module level.
    for kind, data, pos in stream:
        if kind == Stream.START:
            qname, attrs = data
            if qname.namespace == XMLNS_DASHBOARD_UI \
                    and qname.localname == 'arg':
                if inside:
                    raise RuntimeError('Nested bh:arg tag')
                else:
                    argnm = attrs.get('name')
                    argvalue = ''
                    inside = True
        elif kind == Stream.TEXT:
            # FIX: only record text that is actually inside a bh:arg
            # element instead of accumulating unrelated text nodes.
            if inside:
                argvalue += data
        elif kind == Stream.END:
            # FIX: for END events the payload *is* the qualified name.
            # The old code reused the stale `qname` bound by the last
            # START event, which broke for any nested markup.
            qname = data
            if qname.namespace == XMLNS_DASHBOARD_UI \
                    and qname.localname == 'arg':
                args[argnm] = argvalue
                inside = False
    return args
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/bloodhound_dashboard/bhdashboard/macros.py | bloodhound_dashboard/bhdashboard/macros.py | # -*- coding: UTF-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
r"""Bloodhound Macros"""
from genshi.builder import tag
from trac.util.translation import cleandoc_
from trac.wiki.api import WikiSystem
from trac.wiki.macros import WikiMacroBase
from bhdashboard.wiki import GUIDE_NAME
from bhdashboard.util.translation import _
class UserGuideTocMacro(WikiMacroBase):
    # Wiki macro rendering the Bloodhound user guide table of contents.
    _description = cleandoc_("""Display a Guide table of contents
    This macro provides the table-of-contents specific to the user Guide
    """
    )
    # (wiki page template, link label) pairs; %(guide)s expands to the
    # guide prefix at render time.
    TOC = [('%(guide)s/Index', 'Index'),
           ('%(guide)s/Install', 'Installation'),
           ('%(guide)s/InterfaceCustomization', 'Customization'),
           ('%(guide)s/Plugins', 'Plugins'),
           ('%(guide)s/Upgrade', 'Upgrading'),
           ('%(guide)s/Ini', 'Configuration'),
           ('%(guide)s/Admin', 'Administration'),
           ('%(guide)s/Backup', 'Backup'),
           ('%(guide)s/Logging', 'Logging'),
           ('%(guide)s/Permissions', 'Permissions'),
           ('%(guide)s/Wiki', 'The Wiki'),
           ('WikiFormatting', 'Wiki Formatting'),
           ('%(guide)s/Timeline', 'Timeline'),
           ('%(guide)s/Browser', 'Repository Browser'),
           ('%(guide)s/RevisionLog', 'Revision Log'),
           ('%(guide)s/Changeset', 'Changesets'),
           ('%(guide)s/Tickets', 'Tickets'),
           ('%(guide)s/Workflow', 'Workflow'),
           ('%(guide)s/Roadmap', 'Roadmap'),
           ('%(guide)s/Query', 'Ticket Queries'),
           ('%(guide)s/BatchModify', 'Batch Modify'),
           ('%(guide)s/Reports', 'Reports'),
           ('%(guide)s/Rss', 'RSS Support'),
           ('%(guide)s/Notification', 'Notification'),
           ]
    def expand_macro(self, formatter, name, args):
        """Render the guide TOC as a <div class="wiki-toc"> element."""
        current = formatter.resource.id
        data = {'guide': GUIDE_NAME,}
        # Honour scoped wikis (e.g. TranslateRu/Guide or 0.X/Guide): keep
        # the scope prefix but drop a trailing '<guide>/' component.
        prefix = ''
        guideprefix = GUIDE_NAME + '/'
        slash = current.find('/')
        if slash > 0:
            prefix = current[:slash + 1]
            if prefix.endswith(guideprefix):
                prefix = prefix[:-len(guideprefix)]
        wiki_sys = WikiSystem(self.env)
        entries = []
        for ref, title in self.TOC:
            page = prefix + ref % data
            link = tag.a(title,
                         href=formatter.href.wiki(page),
                         class_=(not wiki_sys.has_page(page) and 'missing'))
            entries.append(tag.li(link,
                                  class_=(page == current and 'active')))
        return tag.div(tag.h4(_('Table of Contents')),
                       tag.ul(entries),
                       class_='wiki-toc')
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/bloodhound_dashboard/bhdashboard/admin.py | bloodhound_dashboard/bhdashboard/admin.py | # -*- coding: UTF-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
r"""Project dashboard for Apache(TM) Bloodhound
Administration commands for Bloodhound Dashboard.
"""
import json
import re
from sys import stdout
from trac.admin.api import IAdminCommandProvider, AdminCommandError
from trac.core import Component, implements
from trac.db_default import schema as tracschema
from trac.util.text import printout
from trac.wiki.admin import WikiAdmin
from trac.wiki.model import WikiPage
from bhdashboard import wiki
from bhdashboard.util.translation import _
# Multi-product tables are optional: degrade gracefully when the
# multiproduct plugin is not installed.
try:
    from multiproduct.model import Product, ProductResourceMap, ProductSetting
except ImportError:
    Product = ProductResourceMap = ProductSetting = None
schema = tracschema[:]
if Product is not None:
    schema.extend([Product._get_schema(), ProductResourceMap._get_schema(),
                   ProductSetting._get_schema()])
# Map each table name to the list of its column names.
structure = dict([(table.name, [col.name for col in table.columns])
                  for table in schema])
# add product for any columns required
for table in ['ticket']:
    structure[table].append('product')
# probably no point in keeping data from these tables
ignored = ['auth_cookie', 'session', 'session_attribute', 'cache']
# Split the structure into the part that is dumped/loaded by the fixture
# commands (DB_STRUCTURE) and the transient part that is skipped.
IGNORED_DB_STRUCTURE = dict([(k, structure[k]) for k in ignored])
DB_STRUCTURE = dict([(k, structure[k]) for k in structure if k not in ignored])
class BloodhoundAdmin(Component):
    """Bloodhound administration commands.

    Registers trac-admin commands for rebranding the wiki and for
    dumping/loading development database fixtures.
    """
    implements(IAdminCommandProvider)
    # IAdminCommandProvider methods
    def get_admin_commands(self):
        """List available commands.
        """
        yield ('wiki bh-upgrade', '',
               'Move Trac* wiki pages to %s/*' % wiki.GUIDE_NAME,
               None, self._do_wiki_upgrade)
        yield ('devfixture dump', '[filename]',
               """Dumps database to stdout in a form suitable for reloading
               If a filename is not provided, data will be sent standard out.
               """,
               None, self._dump_as_fixture)
        yield ('devfixture load', '<filename> <backedup>',
               """Loads database fixture from json dump file
               You need to specify a filename and confirm that you have backed
               up your data.
               """,
               None, self._load_fixture_from_file)
    def _do_wiki_upgrade(self):
        """Move all wiki pages starting with Trac prefix to unbranded user
        guide pages.

        For each renamed page a redirection stub is left behind and links
        in other wiki pages are rewritten to the new name.
        """
        wiki_admin = WikiAdmin(self.env)
        pages = wiki_admin.get_wiki_list()
        for old_name in pages:
            if old_name.startswith('Trac'):
                new_name = wiki.new_name(old_name)
                if not new_name:
                    continue
                if new_name in pages:
                    printout(_('Ignoring %(page)s : '
                               'The page %(new_page)s already exists',
                               page=old_name, new_page=new_name))
                    continue
                try:
                    wiki_admin._do_rename(old_name, new_name)
                except AdminCommandError, exc:
                    printout(_('Error moving %(page)s : %(message)s',
                               page=old_name, message=unicode(exc)))
                else:
                    # On success, rename links in other pages
                    self._do_wiki_rename_links(old_name, new_name)
                    # On success, insert redirection page
                    redirection = WikiPage(self.env, old_name)
                    redirection.text = _('See [wiki:"%(name)s"].', name=new_name)
                    comment = 'Bloodhound guide update'
                    redirection.save('bloodhound', comment, '0.0.0.0')
        self._do_wiki_rename_links('[[TracGuideToc]]', '[[UserGuideToc]]')
    def _do_wiki_rename_links(self, old_name, new_name):
        # Rewrite occurrences of old_name to new_name in all wiki pages.
        # Whole-word matching (\b...\b) is used except for macro calls
        # wrapped in [[...]], where \b would not match next to the brackets.
        if old_name.startswith('[[') and old_name.endswith(']]'):
            pattern = r'%s'
        else:
            pattern = r'\b%s\b'
        with self.env.db_transaction as db:
            # Pre-filter candidate pages with a LIKE query, then apply the
            # precise regex substitution in Python.
            pages = db("""SELECT name, text FROM wiki
                          WHERE text %s
                          """ % db.like(),
                       ('%' + db.like_escape(old_name) + '%',))
            for name, text in pages:
                db("""UPDATE wiki
                      SET text=%s
                      WHERE name=%s
                      """, (re.sub(pattern % re.escape(old_name),
                                   new_name, text), name))
    def _get_tdump(self, db, table, fields):
        """Dumps all the data from a table for a known set of fields"""
        return db("SELECT %s from %s" % (','.join([db.quote(f) for f in fields]),
                                         db.quote(table)))
    def _dump_as_fixture(self, *args):
        """Dumps database to a json fixture

        With a filename argument writes to that file, otherwise to stdout.
        """
        def dump_json(fp):
            """Dump to json given a file"""
            with self.env.db_query as db:
                # Each entry: (table name, column names, row tuples)
                data = [(k, v, self._get_tdump(db, k, v))
                        for k, v in DB_STRUCTURE.iteritems()]
                jd = json.dumps(data, sort_keys=True, indent=2,
                                separators=(',', ':'))
                fp.write(jd)
        if len(args):
            f = open(args[0], mode='w+')
            dump_json(f)
            f.close()
        else:
            dump_json(stdout)
    def _load_fixture_from_file(self, fname):
        """Calls _load_fixture with an open file"""
        try:
            fp = open(fname, mode='r')
            self._load_fixture(fp)
            fp.close()
        except IOError:
            printout(_("The file '%(fname)s' does not exist", fname=fname))
    def _load_fixture(self, fp):
        """Extract fixture data from a file like object, expecting json

        Refuses to run when the environment already contains tickets, as
        loading wipes all fixture tables first.
        """
        # Only delete if we think it unlikely that there is data to lose
        with self.env.db_query as db:
            if db('SELECT * FROM ' + db.quote('ticket')):
                printout(_("This command is only intended to run on fresh "
                           "environments as it will overwrite the database.\n"
                           "If it is safe to lose bloodhound data, delete the "
                           "environment and re-run python bloodhound_setup.py "
                           "before attempting to load the fixture again."))
                return
        data = json.load(fp)
        with self.env.db_transaction as db:
            # Clear all target tables first so inserts cannot collide.
            for tab, cols, vals in data:
                db("DELETE FROM " + db.quote(tab))
            for tab, cols, vals in data:
                printout("Populating %s table" % tab)
                db.executemany("INSERT INTO %s (%s) VALUES (%s)"
                               % (db.quote(tab),
                                  ','.join([db.quote(c) for c in cols]),
                                  ','.join(['%s']*len(cols))),
                               vals)
                printout("%d records added" % len(vals))
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/bloodhound_dashboard/bhdashboard/_json.py | bloodhound_dashboard/bhdashboard/_json.py | # -*- coding: UTF-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
r"""Project dashboard for Apache(TM) Bloodhound
Provide a single namespace to access JSON functions.
"""
try :
from json import *
except ImportError:
from simplejson import *
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/bloodhound_dashboard/bhdashboard/model.py | bloodhound_dashboard/bhdashboard/model.py | # -*- coding: UTF-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from trac.db import Table, Column
from trac.core import TracError
from trac.resource import ResourceNotFound, ResourceSystem
from trac.ticket.api import TicketSystem
def dict_to_kv_str(env, data=None, sep=' AND '):
    """Convert a dictionary into an SQL clause fragment plus parameters.

    Returns a pair like ('key0=%s AND key1=%s', ['value0', 'value1']) that
    can be used directly as part of a WHERE clause.  Passing sep=','
    produces the SET form used by UPDATE statements.
    """
    if data is None:
        return ('', [])
    quote = env.get_read_db().quote
    clause = sep.join('%s=%%s' % quote(field) for field in data.keys())
    return (clause, data.values())
def fields_to_kv_str(env, fields, data, sep=' AND '):
    """Convert selected `fields` of `data` into an SQL clause fragment.

    Same return shape as `dict_to_kv_str`, restricted to the named fields:
    ('key0=%s,key1=%s', ['value0', 'value1'])
    """
    subset = dict((name, data[name]) for name in fields)
    return dict_to_kv_str(env, subset, sep)
class ModelBase(object):
    """Base class for the models to factor out common features
    Derived classes should provide a meta dictionary to describe the table like:
    _meta = {'table_name':'mytable',
             'object_name':'WhatIWillCallMyselfInMessages',
             'key_fields':['id','id2'],
             'non_key_fields':[
                 'thing',
                 {
                     name:"field_name_x",
                     type='int64',
                     size=None,
                     key_size=None,
                     auto_increment=False
                 }],
             'auto_inc_fields': ['id',],
             }
    key_fields and non_key_fields parameters may contain field name only (for
    text columns) or dicts with a detailed column specification. In case of
    a detailed column specification the 'name' parameter is obligatory.
    The meta dictionary is also expected to provide 'unique_fields' and
    'no_change_fields' entries (see insert() and update()).
    """
    def __init__(self, env, keys=None):
        """Initialisation requires an environment to be specified.
        If keys are provided, the Model will initialise from the database
        """
        # make this impossible to instantiate without telling the class details
        # about itself in the self.meta dictionary
        self._old_data = {}
        self._data = {}
        self._exists = False
        self._env = env
        self._key_fields = self._get_field_names(self._meta['key_fields'])
        self._non_key_fields = self._get_field_names(
            self._meta['non_key_fields'])
        self._all_fields = self._key_fields + self._non_key_fields
        self._unique_fields = self._meta['unique_fields']
        self._auto_inc_fields = self._get_auto_inc_field_names()
        if keys is not None:
            self._get_row(keys)
        else:
            self._update_from_row(None)
    def update_field_dict(self, field_dict):
        """Updates the object's copy of the db fields (no db transaction)"""
        self._data.update(field_dict)
    def __getattr__(self, name):
        """Overridden to allow table.field style field access."""
        try:
            if name in self._all_fields:
                return self._data[name]
        except KeyError:
            # Field is declared but not yet populated.
            raise AttributeError(name)
        raise AttributeError(name)
    def __setattr__(self, name, value):
        """Overridden to allow table.field = value style field setting."""
        data = self.__dict__.get('_data')
        fields = self.__dict__.get('_all_fields')
        # Route writes to declared fields into _data; everything else
        # (including the bootstrap attributes set in __init__) goes to the
        # instance dict as usual.
        if data and fields and name in fields:
            self._data[name] = value
        else:
            dict.__setattr__(self, name, value)
    @classmethod
    def get_table_name(cls):
        """Return the database table name declared in _meta."""
        return cls._meta["table_name"]
    def _update_from_row(self, row = None):
        """uses a provided database row to update the model

        With row=None all fields are reset to None and the model is marked
        as not existing in the database.
        """
        fields = self._all_fields
        self._exists = row is not None
        if row is None:
            row = [None]*len(fields)
        self._data = dict([(fields[i], row[i]) for i in range(len(row))])
        # Snapshot the loaded state so update() can detect changes later.
        self._old_data = {}
        self._old_data.update(self._data)
    def _get_row(self, keys):
        """queries the database and stores the result in the model

        :raises ResourceNotFound: when no row matches the given keys
        """
        row = None
        where, values = fields_to_kv_str(self._env, self._key_fields, keys)
        fields = ','.join(self._all_fields)
        sdata = {'fields':fields,
                 'where':where}
        sdata.update(self._meta)
        sql = """SELECT %(fields)s FROM %(table_name)s
                 WHERE %(where)s""" % sdata
        with self._env.db_query as db:
            # Only the first matching row is used; the for/else raises when
            # the query returned no rows at all.
            for row in db(sql, values):
                self._update_from_row(row)
                break
            else:
                # The where clause still contains %s placeholders, so a
                # second interpolation fills in the key values for display.
                raise ResourceNotFound(
                    ('No %(object_name)s with %(where)s' % sdata)
                    % tuple(values))
    def delete(self):
        """Deletes the matching record from the database"""
        if not self._exists:
            raise TracError('%(object_name)s does not exist' % self._meta)
        where, values = fields_to_kv_str(self._env, self._key_fields,
                                         self._data)
        sdata = {'where': where}
        sdata.update(self._meta)
        sql = """DELETE FROM %(table_name)s
                 WHERE %(where)s""" % sdata
        with self._env.db_transaction as db:
            db(sql, values)
            self._exists = False
            # Invalidate cached ticket fields and notify listeners.
            TicketSystem(self._env).reset_ticket_fields()
            ResourceSystem(self._env).resource_deleted(self)
        # Clear the in-memory copy so the instance reads as empty.
        self._data = dict([(k, None) for k in self._data.keys()])
        self._old_data.update(self._data)
    def insert(self):
        """Create new record in the database

        :raises TracError: when a record with the same keys or the same
                           unique fields already exists, or when a non
                           auto-increment key field is missing
        """
        sdata = None
        # Reject duplicates on the primary key ...
        if self._exists or len(self.select(self._env, where =
                                    dict([(k,self._data[k])
                                          for k in self._key_fields]))):
            sdata = {'keys':','.join(["%s='%s'" % (k, self._data[k])
                                     for k in self._key_fields])}
        # ... or on any declared unique fields.
        elif self._unique_fields and len(self.select(self._env, where =
                                        dict([(k,self._data[k])
                                              for k in self._unique_fields]))):
            sdata = {'keys':','.join(["%s='%s'" % (k, self._data[k])
                                     for k in self._unique_fields])}
        if sdata:
            sdata.update(self._meta)
            sdata['values'] = self._data
            raise TracError('%(object_name)s %(keys)s already exists %(values)s' %
                            sdata)
        # All key fields must be supplied, except those the database fills
        # in via auto-increment.
        for key in self._key_fields:
            if self._data[key] is None and key not in self._auto_inc_fields:
                sdata = {'key':key}
                sdata.update(self._meta)
                raise TracError('%(key)s required for %(object_name)s' %
                                sdata)
        fields = [field for field in self._all_fields
                  if field not in self._auto_inc_fields]
        sdata = {'fields':','.join(fields),
                 'values':','.join(['%s'] * len(fields))}
        sdata.update(self._meta)
        sql = """INSERT INTO %(table_name)s (%(fields)s)
                 VALUES (%(values)s)""" % sdata
        with self._env.db_transaction as db:
            cursor = db.cursor()
            cursor.execute(sql, [self._data[f] for f in fields])
            # Read back database-generated ids into the model.
            for auto_in_field in self._auto_inc_fields:
                self._data[auto_in_field] = db.get_last_id(
                    cursor, sdata["table_name"], auto_in_field)
            self._exists = True
            self._old_data.update(self._data)
            TicketSystem(self._env).reset_ticket_fields()
            ResourceSystem(self._env).resource_created(self)
    def _update_relations(self, db, author=None):
        """Extra actions due to update"""
        # Hook for subclasses; runs inside the update() transaction.
        pass
    def update(self, author=None):
        """Update the matching record in the database

        No-op when nothing changed.  Changing fields listed in
        'no_change_fields', or changing key/unique fields to values that
        already exist, raises TracError.
        """
        if self._old_data == self._data:
            return
        if not self._exists:
            raise TracError('%(object_name)s does not exist' % self._meta)
        for key in self._meta['no_change_fields']:
            if self._data[key] != self._old_data[key]:
                raise TracError('%s cannot be changed' % key)
        for key in self._key_fields + self._unique_fields:
            if self._data[key] != self._old_data[key]:
                if len(self.select(self._env, where = {key:self._data[key]})):
                    raise TracError('%s already exists' % key)
        setsql, setvalues = fields_to_kv_str(self._env, self._non_key_fields,
                                             self._data, sep=',')
        where, values = fields_to_kv_str(self._env, self._key_fields,
                                         self._data)
        sdata = {'where': where,
                 'values': setsql}
        sdata.update(self._meta)
        sql = """UPDATE %(table_name)s SET %(values)s
                 WHERE %(where)s""" % sdata
        # Record the previous values of every changed field for listeners.
        old_values = dict((k, v) for k, v in self._old_data.iteritems()
                          if self._data.get(k) != v)
        with self._env.db_transaction as db:
            db(sql, setvalues + values)
            self._update_relations(db, author)
            self._old_data.update(self._data)
            TicketSystem(self._env).reset_ticket_fields()
            ResourceSystem(self._env).resource_changed(self, old_values)
    @classmethod
    def select(cls, env, db=None, where=None, limit=None, order_by=None):
        """
        Query the database to get a set of records back
        * order_by: is list of fields with optional sort direction
            ("asc" or "desc") e.g. ["field1", "field2 desc"]
        * db: unused, retained for backward compatibility
        """
        rows = []
        fields = cls._get_all_field_names()
        sdata = {'fields': ','.join(env.get_read_db().quote(f)
                                    for f in fields),}
        sdata.update(cls._meta)
        sql = r'SELECT %(fields)s FROM %(table_name)s' % sdata
        wherestr, values = dict_to_kv_str(env, where)
        if wherestr:
            wherestr = ' WHERE ' + wherestr
        final_sql = sql + wherestr
        if limit is not None:
            final_sql += ' LIMIT ' + str(int(limit))
        if order_by:
            final_sql += "\nORDER BY " + ', '.join(order_by)
        for row in env.db_query(final_sql, values):
            # we won't know which class we need until called
            model = cls.__new__(cls)
            data = dict([(fields[i], row[i]) for i in range(len(fields))])
            model.__init__(env, data)
            rows.append(model)
        return rows
    @classmethod
    def _get_all_field_names(cls):
        """Return key plus non-key field names, in declaration order."""
        return cls._get_field_names(
            cls._meta['key_fields']+cls._meta['non_key_fields'])
    @classmethod
    def _get_field_names(cls, field_specs):
        """Extract plain field names from a mixed list of specs."""
        def get_field_name(field_spec):
            # A spec is either a bare name or a dict with a 'name' entry.
            if isinstance(field_spec, dict):
                return field_spec["name"]
            return field_spec
        return [get_field_name(field_spec) for field_spec in field_specs]
    @classmethod
    def _get_all_field_columns(cls):
        """Normalise all field specs into column-spec dictionaries."""
        auto_inc = cls._meta.get('auto_inc_fields', [])
        columns = []
        all_fields_spec = cls._meta['key_fields'] + cls._meta['non_key_fields']
        for field_spec in all_fields_spec:
            #field_spec can be field name string or dictionary with detailed
            #column specification
            if isinstance(field_spec, dict):
                column_spec = field_spec
            else:
                column_spec = dict(
                    name = field_spec,
                    auto_increment=field_spec in auto_inc)
            columns.append(column_spec)
        return columns
    @classmethod
    def _get_auto_inc_field_names(cls):
        """Return the names of columns flagged as auto-increment."""
        return [field_spec["name"] for field_spec
                in cls._get_all_field_columns()
                if field_spec.get("auto_increment")]
    @classmethod
    def _get_schema(cls):
        """Generate schema from the class meta data"""
        fields = [Column(
            column_spec["name"],
            type=column_spec.get("type", "text"),
            size=column_spec.get("size"),
            key_size=column_spec.get("key_size"),
            auto_increment=column_spec.get("auto_increment", False))
            for column_spec in cls._get_all_field_columns()]
        # Table(...)[fields] is trac's schema DSL: the subscript attaches
        # the column list to the table definition.
        return Table(cls._meta['table_name'], key=set(cls._meta['key_fields'] +
                                                      cls._meta['unique_fields'])) [fields]
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/bloodhound_dashboard/bhdashboard/__init__.py | bloodhound_dashboard/bhdashboard/__init__.py | # -*- coding: UTF-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
r"""Project dashboard for Apache(TM) Bloodhound
Add custom dashboards in multiple pages of Bloodhound sites.
"""
# Ignore errors to avoid Internal Server Errors
from trac.core import TracError
# Monkey-patch TracError rendering so non-ASCII characters in error
# messages are silently dropped instead of raising UnicodeEncodeError.
TracError.__str__ = lambda self: unicode(self).encode('ascii', 'ignore')
# Import smoke test: record whether the package loads cleanly so the
# failure can be inspected later rather than breaking at import time.
try:
    from bhdashboard import *
    msg = 'Ok'
except Exception, exc:
    # raise
    msg = "Exception %s raised: '%s'" % (exc.__class__.__name__, str(exc))
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/bloodhound_dashboard/bhdashboard/wiki.py | bloodhound_dashboard/bhdashboard/wiki.py | # -*- coding: UTF-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
r"""Project dashboard for Apache(TM) Bloodhound
WikiMacros and WikiProcessors related to dashboard system.
"""
from ast import literal_eval
from genshi.builder import tag
from trac.web.chrome import Chrome
from trac.wiki.api import parse_args
from trac.wiki.macros import WikiMacroBase
from bhdashboard.web_ui import DashboardChrome, DashboardModule
GUIDE_NAME = 'Guide'
# Pages whose guide name does not follow the default Trac* -> Guide/*
# mapping.
RENAME_MAP = {'TracGuide': GUIDE_NAME + '/Index',}
def new_name(name, force=False):
    """Map a Trac-branded wiki page name onto its Bloodhound guide name.

    Names not starting with 'Trac' are returned unchanged.  `force` is
    accepted for interface compatibility and currently unused.
    """
    if not name.startswith('Trac'):
        return name
    return RENAME_MAP.get(name, GUIDE_NAME + '/' + name[4:])
class WidgetMacro(WikiMacroBase):
    """Embed Bloodhound widgets using WikiFormatting.
    """
    #: A gettext domain to translate the macro description
    _domain = None
    #: A macro description
    _description = """Embed Bloodhound widgets using WikiFormatting."""
    def expand_macro(self, formatter, name, content):
        """Render widget contents by re-using wiki markup implementation

        The single positional argument in `content` is the widget name.
        Keyword arguments prefixed with 'wo_' become widget options (raw
        strings); all others become widget arguments (parsed as Python
        literals when possible).
        """
        if self.env[DashboardModule] is None:
            # Dashboard module disabled: show an alert instead of a widget.
            return DashboardModule(self.env).alert_disabled()
        largs, kwargs = parse_args(content, strict=True)
        try:
            (widget_name ,) = largs
        except ValueError:
            # Not exactly one positional argument: render an error alert
            # describing the expected invocation.
            template = 'widget_alert.html'
            data = {
                'msgtype' : 'error',
                'msglabel' : 'Error',
                'msgbody' : tag('Expected ', tag.code(1),
                    ' positional argument (i.e. widget name), but got ',
                    tag.code(len(largs)), ' instead'),
                'msgdetails' : [
                    ('Macro name', tag.code('WidgetMacro')),
                    ('Arguments', ', '.join(largs) if largs \
                        else tag.span('None', class_='label')),
                ],
            }
        else:
            widget_name = widget_name.strip()
            wopts = {} ; wargs = {}
            def parse_literal(value):
                # Interpret the value as a Python literal when possible,
                # falling back to the raw string otherwise.
                try:
                    return literal_eval(value)
                except (SyntaxError, ValueError):
                    return value
            for argnm, value in kwargs.iteritems():
                if argnm.startswith('wo_'):
                    # 'wo_' prefix selects widget *options* (kept raw).
                    wopts[argnm[3:]] = value
                else :
                    wargs[argnm] = parse_literal(value)
            template = 'widget.html'
            data = {
                'args' : wargs,
                'bhdb' : DashboardChrome(self.env),
                'id' : None,
                'opts' : wopts,
                'widget' : widget_name
            }
        return Chrome(self.env).render_template(
            formatter.req, template, data, fragment=True)
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/bloodhound_dashboard/bhdashboard/util/widgets.py | bloodhound_dashboard/bhdashboard/util/widgets.py | # -*- coding: UTF-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
r"""Project dashboard for Apache(TM) Bloodhound
Widget helpers functions and classes.
"""
import inspect
from trac.core import Component, implements
from trac.util.text import to_unicode
from bhdashboard.api import DashboardSystem, IWidgetProvider, InvalidIdentifier
#------------------------------------------------------
# Widget helpers
#------------------------------------------------------
class WidgetBase(Component):
    """Abstract base class for widgets"""
    implements(IWidgetProvider)
    abstract = True
    def get_widgets(self):
        """Yield this widget's name, derived from the subclass name by
        stripping a trailing 'Widget' suffix.
        """
        widget_name = type(self).__name__
        suffix = 'Widget'
        if widget_name.endswith(suffix):
            widget_name = widget_name[:-len(suffix)]
        yield widget_name
    def get_widget_description(self, name):
        """Return the subclass's docstring."""
        return to_unicode(inspect.getdoc(type(self)))
    def get_widget_params(self, name):
        """Return a dictionary containing arguments specification for
        the widget with specified name.
        """
        raise NotImplementedError
    def render_widget(self, context, name, options):
        """Render widget considering given options."""
        raise NotImplementedError
    # Helper methods
    def bind_params(self, name, options, *params):
        """Resolve `options` against this widget's parameter spec."""
        spec = self.get_widget_params(name)
        return DashboardSystem(self.env).bind_params(options, spec, *params)
def check_widget_name(f):
    """Decorator used to wrap methods of widget providers so as to ensure
    widget names will match those listed by `get_widgets` method.
    """
    def widget_name_checker(self, name, *args, **kwargs):
        known = set(self.get_widgets())
        if name in known:
            return f(self, name, *args, **kwargs)
        raise InvalidIdentifier('Widget name MUST match any of ' +
                                ', '.join(known),
                                title='Invalid widget identifier')
    return widget_name_checker
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/bloodhound_dashboard/bhdashboard/util/translation.py | bloodhound_dashboard/bhdashboard/util/translation.py | # -*- coding: UTF-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
r"""Project dashboard for Apache(TM) Bloodhound
Translation functions and classes.
"""
from trac.util.translation import domain_functions
#------------------------------------------------------
# Internationalization
#------------------------------------------------------
# Translation helpers bound to the 'bhdashboard' message catalog.
# NOTE(review): add_domain must still be called (typically from a
# component's __init__) to register the locale directory -- confirm
# against the plugin's setup code.
_, ngettext, tag_, tagn_, gettext, N_, add_domain = \
    domain_functions('bhdashboard', ('_', 'ngettext', 'tag_', 'tagn_',
                                     'gettext', 'N_', 'add_domain'))
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/bloodhound_dashboard/bhdashboard/util/__init__.py | bloodhound_dashboard/bhdashboard/util/__init__.py | # -*- coding: UTF-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
r"""Project dashboard for Apache(TM) Bloodhound
Helper functions and classes.
"""
from functools import update_wrapper
from pkg_resources import get_distribution
from urlparse import urlparse
from wsgiref.util import setup_testing_defaults
from trac.core import ExtensionPoint
from trac.web.api import Request
from trac.web.chrome import add_link, Chrome
from trac.web.main import RequestDispatcher
#------------------------------------------------------
# Request handling
#------------------------------------------------------
def dummy_request(env, uname=None):
    """Create a synthetic GET request for off-line template rendering.

    :param env: Trac environment; its absolute base URL seeds the WSGI
                environ (SCRIPT_NAME, trac.base_url).
    :param uname: optional user name; when given the request is treated
                  as authenticated for that user.
    Redirects are turned into no-ops so request handlers can run outside
    a real HTTP request/response cycle.
    """
    environ = {}
    setup_testing_defaults(environ)
    environ.update({
        'REQUEST_METHOD' : 'GET',
        'SCRIPT_NAME' : urlparse(str(env._abs_href())).path,
        'trac.base_url' : str(env._abs_href()),
    })
    req = Request(environ, lambda *args, **kwds: None)
    # Intercept redirection
    req.redirect = lambda *args, **kwds: None
    # Setup user information
    if uname is not None :
        environ['REMOTE_USER'] = req.authname = uname
    rd = RequestDispatcher(env)
    chrome = Chrome(env)
    # Wire up the lazy callbacks Trac normally installs during dispatch;
    # getattr defaults cover callbacks missing in some Trac versions.
    req.callbacks.update({
        'authname': rd.authenticate,
        'chrome': chrome.prepare_request,
        'hdf': getattr(rd, '_get_hdf', None),
        'lc_time': rd._get_lc_time,
        'locale' : getattr(rd, '_get_locale', None),
        'perm': rd._get_perm,
        'session': rd._get_session,
        'tz': rd._get_timezone,
        'form_token': rd._get_form_token
    })
    return req
def merge_links(srcreq, dstreq, exclude=None):
    """Incorporate links in `srcreq` into `dstreq`.

    Link relations listed in `exclude` (default: ['alternate']) are
    skipped.  Requests without chrome links are left untouched.
    """
    excluded = ['alternate'] if exclude is None else exclude
    if 'links' not in srcreq.chrome:
        return
    for rel, links in srcreq.chrome['links'].iteritems():
        if rel in excluded:
            continue
        for link in links:
            add_link(dstreq, rel, **link)
#------------------------------------------------------
# Function decorators and wrappers
#------------------------------------------------------
def pretty_wrapper(wrapped, *decorators):
    """Apply multiple decorators to a given function while making the
    result carry the wrapped function's metadata (name, docstring, ...).
    """
    result = wrapped
    for decorate in decorators:
        result = decorate(result)
    return update_wrapper(result, wrapped)
#------------------------------------------------------
# Trac core
#------------------------------------------------------
def resolve_ep_class(interface, component, clsnm, **kwargs):
    r"""Retrieve the class implementing an interface (by name)

    :param interface: the target interface class.
    :param component: component (manager) used to look up registered
        extensions.
    :param clsnm: class name matched against each extension's class.
    :param default: (keyword-only) value returned when nothing matches;
        if absent a `LookupError` is raised instead.

    NOTE(review): despite the name, this returns the matching component
    *instance* yielded by the extension point, not the class object.
    """
    ep = ExtensionPoint(interface)
    for c in ep.extensions(component):
        if c.__class__.__name__ == clsnm :
            return c
    else:
        # for/else: only reached when the loop finishes without an
        # early `return`, i.e. no extension matched `clsnm`.
        if 'default' in kwargs:
            return kwargs['default']
        else:
            raise LookupError('No match found for class %s implementing %s' %
                    (clsnm, interface) )
#------------------------------------------------------
# Context information
#------------------------------------------------------
# Version of the installed Trac distribution as a tuple of ints, e.g.
# (0, 11). NOTE(review): relies on the legacy setuptools
# `parsed_version` format where numeric parts are zero-padded strings
# (hence the `startswith('0')` filter) -- TODO confirm against modern
# setuptools, whose `parsed_version` is a `Version` object.
trac_version = tuple(int(i) for i in get_distribution('Trac').parsed_version \
        if i.startswith('0'))
# The exact moments (versions) where some things started to change
# in such a manner that break previous test code
trac_tags = (
    (0, 13), # TODO: Find the exact version ( Trac=0.12 ? )
)
#------------------------------------------------------
# Miscellaneous
#------------------------------------------------------
def minmax(seq, accessor=lambda x: x):
    """Retrieve lower and upper bounds in a sequence.

    :param seq: any iterable; it is consumed by this function.
    :param accessor: optional callable mapping each item to the value
        that is actually compared (identity by default).
    :return: ``dict(min=..., max=...)``; both entries are `None` when
        `seq` is empty.
    """
    minval = maxval = None
    seq = iter(seq)
    try:
        # `next(seq)` (instead of the Python 2-only `seq.next()`) works
        # on Python 2.6+ and 3.x alike.
        minval = maxval = accessor(next(seq))
    except StopIteration:
        # Empty input: report None for both bounds.
        pass
    for x in seq:
        value = accessor(x)
        if value > maxval:
            maxval = value
        if value < minval:
            minval = value
    return dict(min=minval, max=maxval)
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/bloodhound_dashboard/bhdashboard/tests/test_webui.py | bloodhound_dashboard/bhdashboard/tests/test_webui.py | # -*- coding: UTF-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
r"""Project dashboard for Apache(TM) Bloodhound
In this file you'll find part of the tests written to ensure that
dashboard web module works as expected.
Only the tests requiring minimal setup effort are included below.
This means that the environment used to run these tests contains the
bare minimum information included in an environment (i.e. only the
data specified by `trac.db_default.get_data`).
Once the tests are started all built-in components (except
trac.versioncontrol.* ) as well as widget system and extensions
are loaded. Besides the following values are (auto-magically)
made available in the global namespace (i.e. provided that
the test name be written like `|widget_name: Descriptive message`):
- __tester__ An instance of `unittest.TestCase` representing the
test case for the statement under test. Useful
when specific assertions (e.g. `assertEquals`)
are needed.
- req A dummy request object setup for anonymous access.
- auth_req A dummy request object setup like if user `murphy` was
accessing the site.
- env the Trac environment used as a stub for testing purposes.
This object is an instance of
`bhdashboard.tests.EnvironmentStub`.
- ticket_data A set of tickets used for testing purposes.
"""
#------------------------------------------------------
# Test artifacts
#------------------------------------------------------
import sys
from bhdashboard.tests import trac_version, trac_tags
def test_suite():
    """Assemble this module's suite: the default unittest loader plus
    the Trac-aware doctest loader."""
    from doctest import ELLIPSIS, NORMALIZE_WHITESPACE, REPORT_UDIFF
    from unittest import defaultTestLoader
    from dutest import MultiTestLoader
    from bhdashboard.tests import DocTestTracLoader, ticket_data
    flags = ELLIPSIS | REPORT_UDIFF | NORMALIZE_WHITESPACE
    if trac_version < (0, 13): # FIXME: Should it be (0, 12) ?
        loader_opts = {'enable': ['trac.[a-uw-z]*', 'tracrpc.*',
                                  'bhdashboard.*']}
    else:
        loader_opts = {'enable': ['trac.*', 'tracrpc.*', 'bhdashboard.*'],
                       'disable': ['trac.versioncontrol.*']}
    dt_loader = DocTestTracLoader(extraglobs=dict(ticket_data=ticket_data),
                                  default_data=True,
                                  optionflags=flags,
                                  **loader_opts)
    loader = MultiTestLoader([defaultTestLoader, dt_loader])
    return loader.loadTestsFromModule(sys.modules[__name__])
#------------------------------------------------------
# Helper functions
#------------------------------------------------------
from datetime import datetime, time, date
from itertools import izip
from pprint import pprint
from bhdashboard.tests import clear_perm_cache
def prepare_ticket_workflow(tcktrpc, ticket_data, auth_req):
    r"""Set ticket status considering the actions defined in standard
    ticket workflow. Needed for TracRpc>=1.0.6
    """
    from time import sleep
    action_for = {'accepted': 'accept', 'closed': 'resolve',
                  'assigned': 'reassign'}
    sleep(1)
    # First pass: fire the workflow action leading to the target status.
    for offset, (_, __, attrs) in enumerate(ticket_data):
        action = action_for.get(attrs.get('status'))
        if action is None:
            continue
        payload = {'action': action}
        payload.update(attrs)
        tcktrpc.update(auth_req, offset + 1, "", payload)
    sleep(1)
    # Second pass: re-apply the plain ticket attributes.
    for offset, (_, __, attrs) in enumerate(ticket_data):
        tcktrpc.update(auth_req, offset + 1, "", attrs)
from bhdashboard.web_ui import DashboardModule
__test__ = {
'Initialization: Report widgets' : r"""
""",
'Rendering templates' : r"""
>>> dbm = DashboardModule(env)
>>> from trac.mimeview.api import Context
>>> context = Context.from_request(auth_req)
#FIXME: This won't work. Missing schema
>>> pprint(dbm.expand_widget_data(context))
[{'content': <genshi.core.Stream object at ...>,
'title': <Element "a">}]
""",
}
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/bloodhound_dashboard/bhdashboard/tests/__init__.py | bloodhound_dashboard/bhdashboard/tests/__init__.py | # -*- coding: UTF-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
__metaclass__ = type
import sys
import trac.test
from trac.core import ComponentMeta
from trac.db.api import _parse_db_str, DatabaseManager
from trac.mimeview.api import Context
from trac.util.compat import set
from bhdashboard.util import trac_version, trac_tags
#------------------------------------------------------
# Trac environments used for testing purposes
#------------------------------------------------------
class EnvironmentStub(trac.test.EnvironmentStub):
    r"""Enhanced stub of the trac.env.Environment object for testing.
    """

    # Don't break lazy evaluation. Otherwise RPC calls mysteriously fail.
    @property
    def _abs_href(self):
        # Expose `abs_href` under the private name that helpers such as
        # `dummy_request` expect.
        return self.abs_href

    def enable_component(self, clsdef):
        r"""Enable a plugin temporarily at testing time.

        :param clsdef: component class (or rule) to enable.
        """
        if trac_version < trac_tags[0]:
            # `enabled_components` should be enough in this case
            if clsdef not in self.enabled_components:
                self.enabled_components.append(clsdef)
        else:
            # Use environment configuration otherwise
            raise NotImplementedError("TODO: Enable components in Trac>=0.13")

    def disable_component(self, clsdef):
        r"""Disable a plugin temporarily at testing time.

        :param clsdef: component class (or rule) to disable.
        """
        if trac_version < trac_tags[0]:
            try:
                self.enabled_components.remove(clsdef)
            except ValueError:
                # Not enabled in the first place: log and carry on.
                self.log.warning("Component %s was not enabled", clsdef)
        else:
            # Use environment configuration otherwise
            raise NotImplementedError("TODO: Disable components in Trac>=0.13")

    def rip_component(self, cls):
        r"""Disable a plugin forever and RIP it using the super-laser beam.
        """
        self.disable_component(cls)
        # Also remove the class from every extension-point registry so
        # it can no longer be discovered during this test run.
        for reg in ComponentMeta._registry.itervalues():
            try:
                reg.remove(cls)
            except ValueError:
                pass

    # Backfill `reset_db` only on Trac versions whose stub lacks it.
    if not hasattr(trac.test.EnvironmentStub, 'reset_db'):
        # Copycat trac.test.EnvironmentStub.reset_db (Trac=0.11.5)
        def reset_db(self, default_data=None):
            r"""Remove all data from Trac tables, keeping the tables themselves.

            :param default_data: after clean-up, initialize with default data
            :return: True upon success
            """
            from trac import db_default
            db = self.get_db_cnx()
            db.rollback() # make sure there's no transaction in progress
            cursor = db.cursor()
            defdata = list(db_default.get_data(db))
            for table, cols, vals in defdata:
                cursor.execute("DELETE FROM %s" % (table,))
            db.commit()
            if default_data:
                for table, cols, vals in defdata:
                    cursor.executemany("INSERT INTO %s (%s) VALUES (%s)"
                                       % (table, ','.join(cols),
                                          ','.join(['%s' for c in cols])),
                                       vals)
            else:
                cursor.execute("INSERT INTO system (name, value) "
                               "VALUES (%s, %s)",
                               ('database_version', str(db_default.db_version)))
            db.commit()
#------------------------------------------------------
# Minimalistic testing framework for Trac
#------------------------------------------------------
from dutest import DocTestLoader, DocTestSuiteFixture
from os.path import dirname
from types import MethodType
from bhdashboard.util import dummy_request
# Hide this module from tracebacks written into test results.
__unittest = True
class DocTestTracLoader(DocTestLoader):
    r"""A generic XUnit loader that allows to load doctests written
    to check that Trac plugins behave as expected.
    """
    def set_env(self, env):
        # Store the environment in `extraglobs` so it is injected into
        # the global namespace of every doctest run by this loader.
        if self.extraglobs is None:
            self.extraglobs = dict(env=env)
        else:
            self.extraglobs['env'] = env
    env = property(lambda self: self.extraglobs.get('env'), set_env, \
            doc="""The Trac environment used in doctests.""")
    # Remove the setter helper from the class namespace; it remains
    # reachable only through the `env` property above.
    del set_env

    def __init__(self, dt_finder=None, globs=None, extraglobs=None, \
            load=None, default_data=False, enable=None, \
            disable=None, **opts):
        r"""Initialization. It basically works like `DocTestLoader`'s
        initializer but creates also the Trac environment used for
        testing purposes. The default behavior is to create an instance
        of `EnvironmentStub` class. Subclasses can add more specific
        keyword parameters in order to use them to create the
        environment. Next it loads (and | or) enables the components
        needed by the test suite.

        The following variables are magically available at testing time.
        They can be used directly in doctests :

        - req        A dummy request object setup for anonymous access.
        - auth_req   A dummy request object setup like if user `murphy` was
                     accessing the site.
        - env        the Trac environment used as a stub for testing
                     purposes (i.e. `self.env`).

        @param dt_finder        see docs for `DocTestLoader.__init__`
                                method.
        @param globs            see docs for `DocTestLoader.__init__`
                                method.
        @param extraglobs       see docs for `DocTestLoader.__init__`
                                method.
        @param load             a list of packages containing components
                                that will be loaded to ensure they are
                                available at testing time. It should be
                                the top level module in that package
                                (e.g. 'trac').
        @param default_data     If true, populate the database with some
                                defaults. This parameter has to be
                                handled by `createTracEnv` method.
        @param enable           a list of UNIX patterns specifying which
                                components need to be enabled by default
                                at testing time. This parameter should be
                                handled by `createTracEnv` method.
        @param disable          a list of UNIX patterns specifying which
                                components need to be disabled by default
                                at testing time. Ignored in Trac<=0.11 .
                                This parameter should be
                                handled by `createTracEnv` method.
        """
        super(DocTestTracLoader, self).__init__(dt_finder, globs, \
                extraglobs, **opts)
        # On newer Trac the `disable` patterns are forwarded to
        # `createTracEnv` through `opts` (accepted there via **params).
        if trac_version >= trac_tags[0]:
            opts['disable'] = disable
        self.env = self.createTracEnv(default_data, enable, **opts)
        # NOTE: `A and B or C` idiom -- a falsy (e.g. empty) `load`
        # list also falls back to `self.default_packages`.
        self.load_components(load is None and self.default_packages or load)

    # Load trac built-in components by default
    default_packages = ['trac']

    def createTracEnv(self, default_data=False, enable=None,
                      disable=None, **params):
        r"""Create the Trac environment used for testing purposes. The
        default behavior is to create an instance of `EnvironmentStub`
        class. Subclasses can override this decision and add more specific
        keyword parameters in order to control environment creation in
        more detail.

        All parameters supplied at initialization time. By default they
        are ignored.

        @param default_data     If True, populate the database with some
                                defaults.
        @param enable           a list of UNIX patterns specifying which
                                components need to be enabled by default
                                at testing time.
        @param disable          a list of UNIX patterns specifying which
                                components need to be disabled by default
                                at testing time. Ignored in Trac<0.13

        @return the environment used for testing purpose.
        """
        if trac_version >= trac_tags[0]:
            kwargs = {'disable': disable}
        else:
            kwargs = {}
        return EnvironmentStub(default_data, enable, **kwargs)

    def load_components(self, pkgs):
        r"""Load some packages to ensure that the components they
        implement are available at testing time.
        """
        from trac.loader import load_components
        for pkg in pkgs:
            try:
                __import__(pkg)
            except ImportError:
                pass # Skip pkg. What a shame !
            else:
                # Scan the package's parent directory, which is the
                # layout `trac.loader.load_components` expects.
                mdl = sys.modules[pkg]
                load_components(self.env, dirname(dirname(mdl.__file__)))

    class doctestSuiteClass(DocTestSuiteFixture):
        r"""Prepare the global namespace before running all doctests
        in the suite. Reset the Trac environment.
        """
        # Name of the user impersonated by `auth_req`.
        username = 'murphy'

        @property
        def env(self):
            r"""The Trac environment involved in this test. It is
            retrieved using the global namespace ;o).
            """
            return self.globalns['env']

        def new_request(self, uname=None, args=None):
            r"""Create and initialize a new request object.

            :param uname: optional authenticated user name.
            :param args: optional request arguments (query parameters).
            """
            req = dummy_request(self.env, uname)
            if args is not None:
                req.args = args
            return req

        def setUp(self):
            r"""Include two (i.e. `req` anonymous and `auth_req`
            authenticated) request objects in the global namespace, before
            running the doctests. Besides, clean up environment data and
            include only default data.
            """
            from pprint import pprint
            from trac.core import ComponentMeta
            globs = self.globalns
            req = self.new_request(args=dict())
            auth_req = self.new_request(uname=self.username, args=dict())
            globs['req'] = req
            globs['auth_req'] = auth_req
            # TODO: If the source docstrings belong to a Trac component,
            #       then instantiate it and include in the global
            #       namespace.
            # Delete data in Trac tables
            from trac import db_default
            db = self.env.get_db_cnx()
            cursor = db.cursor()
            for table in db_default.schema:
                # Older Trac only needs the rows wiped; newer Trac
                # recreates the schema in `reset_db`, so drop tables.
                if trac_version < trac_tags[0]: # FIXME: Should it be (0, 12) ?
                    cursor.execute("DELETE FROM " + table.name)
                else:
                    cursor.execute("DROP TABLE " + table.name)
            db.commit()
            self.env.reset_db(default_data=True)
#------------------------------------------------------
# Test artifacts used to test widget providers
#------------------------------------------------------
from bhdashboard.api import InvalidIdentifier
class DocTestWidgetLoader(DocTestTracLoader):
    r"""Load doctests used to test dashboard widget providers.

    NOTE: previously documented as a loader for "Trac RPC handlers";
    it actually resolves `IWidgetProvider` components through
    `DashboardSystem` (see `setup_widget`).
    """
    class doctestSuiteClass(DocTestTracLoader.doctestSuiteClass):
        r"""Include the appropriate widget provider in global namespace
        before running all test cases in the suite.
        """
        def ns_from_name(self):
            r"""Extract the target namespace under test using the name
            of the DocTest instance manipulated by the suite
            (e.g. '|WidgetName: message' -> 'WidgetName').

            :return: the widget namespace, or `None` if it cannot be
                     derived from the test name.
            """
            try:
                return self._dt.name.split(':', 1)[0].split('|', 1)[-1]
            except Exception:
                # Narrowed from a bare `except:` so that SystemExit /
                # KeyboardInterrupt are no longer swallowed.
                return None

        def partial_setup(self):
            r"""Perform partial setup due to some minor failure (e.g.
            namespace missing in test name).
            """
            globs = self.globalns
            globs['widget'] = globs['ctx'] = globs['auth_ctx'] = None

        def setup_widget(self, widgetns):
            r"""(Insert | update) the IWidgetProvider in the global
            namespace.

            @param widgetns widget name.

            @throws InvalidIdentifier if a widget with requested name
                                      cannot be found.
            """
            globs = self.globalns
            globs['ctx'] = Context.from_request(globs['req'])
            globs['auth_ctx'] = Context.from_request(globs['auth_req'])
            # Pick the first provider advertising the requested widget.
            for wp in self.dbsys.providers:
                if widgetns in set(wp.get_widgets()):
                    globs['widget'] = wp
                    break
            else:
                raise InvalidIdentifier(
                        'Cannot load widget provider for %s' % widgetns)

        def setUp(self):
            r"""Include the appropriate widget provider in global namespace
            before running all test cases in the suite. In this case three
            objects are added to the global namespace :

            - `widget`      the component implementing the widget under test
            - `ctx`         context used to render the widget for
                            anonymous user
            - `auth_ctx`    context used to render the widget for
                            authenticated user
            """
            # Fail here if BloodhoundDashboardPlugin is not available. Thus
            # this fact will be reported as a failure and subsequent test
            # cases will be run anyway.
            from bhdashboard.api import DashboardSystem
            self.dbsys = DashboardSystem(self.env)
            # Add request objects
            DocTestTracLoader.doctestSuiteClass.setUp(self)
            widgetns = self.ns_from_name()
            if widgetns is None:
                # TODO: If doctests belong to a widget provider class then
                #       instantiate it. In the mean time ...
                self.partial_setup()
            else:
                try:
                    self.setup_widget(widgetns)
                except InvalidIdentifier:
                    self.partial_setup()

    # Load trac built-in components and RPC handlers by default
    default_packages = ['trac']
#------------------------------------------------------
# Helper functions used in test cases
#------------------------------------------------------
def clear_perm_cache(_env, _req):
    r"""Flush permission caches so that permission changes made by a
    test case take effect right away.
    """
    from trac.perm import DefaultPermissionPolicy, PermissionSystem
    # Per-request permission cache
    _req.perm._cache.clear()
    # Cache held by the default policy, if that policy is active
    default_policy = next(
        (p for p in PermissionSystem(_env).policies
         if isinstance(p, DefaultPermissionPolicy)), None)
    if default_policy is not None:
        default_policy.permission_cache.clear()
#------------------------------------------------------
# Global test data
#------------------------------------------------------
from ConfigParser import RawConfigParser
from pkg_resources import resource_stream
def load_test_data(key):
    r"""Yield (section, options_dict) pairs read from a .INI file
    stored inside this package's `data` directory.

    @param key path to the file containing the data, relative to
               the `data` folder (without the `.ini` extension).
    """
    stream = resource_stream(__name__, 'data/%s.ini' % key)
    try:
        parser = RawConfigParser()
        parser.readfp(stream)
        for name in parser.sections():
            yield name, dict(parser.items(name))
    finally:
        stream.close()
# The set of tickets used by test cases.
# Each entry is a (summary, description, remaining_attrs) triple, in
# the (sorted) section order of `data/ticket_data.ini`.
ticket_data = [(attrs.pop('summary'), attrs.pop('description'), attrs) \
        for _, attrs in sorted(load_test_data('ticket_data'))]
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/bloodhound_dashboard/bhdashboard/tests/test_report.py | bloodhound_dashboard/bhdashboard/tests/test_report.py | # -*- coding: UTF-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
r"""Project dashboard for Apache(TM) Bloodhound
In this file you'll find part of the tests written to ensure that
widgets displaying contents generated by TracReports behave as expected.
Only the tests requiring minimal setup effort are included below.
This means that the environment used to run these tests contains the
bare minimum information included in an environment (i.e. only the
data specified by `trac.db_default.get_data`).
Once the tests are started all built-in components (except
trac.versioncontrol.* ) as well as widget system and extensions
are loaded. Besides the following values are (auto-magically)
made available in the global namespace (i.e. provided that
the test name be written like `|widget_name: Descriptive message`):
- __tester__ An instance of `unittest.TestCase` representing the
test case for the statement under test. Useful
when specific assertions (e.g. `assertEquals`)
are needed.
- req A dummy request object setup for anonymous access.
- auth_req A dummy request object setup like if user `murphy` was
accessing the site.
- env the Trac environment used as a stub for testing purposes.
This object is an instance of
`bhdashboard.tests.EnvironmentStub`.
- widget the widget provider under test.
- ctx context used to render widget for anonymous user.
- ctx_auth context used to render widget for `murphy` user.
- ticket_data A set of tickets used for testing purposes.
"""
#------------------------------------------------------
# Test artifacts
#------------------------------------------------------
import sys
from bhdashboard.tests import trac_version
def test_suite():
    """Assemble this module's suite: the default unittest loader plus
    the widget-aware doctest loader."""
    from doctest import ELLIPSIS, NORMALIZE_WHITESPACE, REPORT_UDIFF
    from unittest import defaultTestLoader
    from dutest import MultiTestLoader
    from bhdashboard.tests import DocTestWidgetLoader, ticket_data
    flags = ELLIPSIS | REPORT_UDIFF | NORMALIZE_WHITESPACE
    if trac_version < (0, 13): # FIXME: Should it be (0, 12) ?
        loader_opts = {'enable': ['trac.[a-uw-z]*', 'tracrpc.*',
                                  'bhdashboard.*']}
    else:
        loader_opts = {'enable': ['trac.*', 'tracrpc.*', 'bhdashboard.*'],
                       'disable': ['trac.versioncontrol.*']}
    dt_loader = DocTestWidgetLoader(extraglobs=dict(ticket_data=ticket_data),
                                    default_data=True,
                                    optionflags=flags,
                                    **loader_opts)
    loader = MultiTestLoader([defaultTestLoader, dt_loader])
    return loader.loadTestsFromModule(sys.modules[__name__])
#------------------------------------------------------
# Helper functions
#------------------------------------------------------
from datetime import datetime, time, date
from itertools import izip
from pprint import pprint
from bhdashboard.tests import clear_perm_cache
def print_report_metadata(report_desc):
    # Dump the standard report fields as underlined sections, in the
    # exact layout the doctests in this module expect.
    for attrnm in ('id', 'title', 'description', 'query'):
        print attrnm.capitalize()
        print '-' * len(attrnm)
        print report_desc[attrnm]
def print_report_columns(cols):
    # Each `coldsc` is a (name, type[, label]) tuple; '_' stands in for
    # a missing/empty value so doctest output stays unambiguous.
    for coldsc in cols:
        print 'Column:', coldsc[0], 'Type:', coldsc[1] or '_', \
                'Label:',
        try:
            print coldsc[2] or '_'
        except IndexError:
            # The label element is optional.
            print '_'
def print_report_result(cols, data):
    # Dump each result row, one "Column: ... Value: ..." line per
    # column. Falsy cell values are printed as None to keep doctest
    # output stable.
    for i, row in enumerate(data):
        print '= Row', i, '='
        for coldsc in cols:
            colnm = coldsc[0]
            print 'Column:', colnm, 'Value:', row.get(colnm) or None, ''
# Ticket fields checked by the doctests below, in printing order.
TICKET_ATTRS = ('summary', 'description', 'priority',
                'milestone', 'type', 'owner', 'status',
                'component', 'version')
def prepare_ticket_workflow(tktrpc, ticket_data, auth_req):
    r"""Set ticket status considering the actions defined in standard
    ticket workflow. Needed for TracRpc>=1.0.6

    NOTE(review): near-duplicate of the helper with the same name in
    `tests/test_webui.py`; keep both copies in sync.
    """
    from time import sleep
    TICKET_ACTIONS = {'accepted': 'accept', 'closed': 'resolve',
                      'assigned': 'reassign'}
    # sleep(1) between batches -- presumably to guarantee distinct
    # change timestamps on the updates; TODO confirm.
    sleep(1)
    # First pass: fire the workflow action leading to the target status.
    for idx, (_, __, td) in enumerate(ticket_data):
        action = TICKET_ACTIONS.get(td.get('status'))
        if action is not None:
            aux_attrs = {'action': action}
            aux_attrs.update(td)
            tktrpc.update(auth_req, idx + 1, "", aux_attrs)
    sleep(1)
    # Second pass: re-apply the plain attributes once status is set.
    for idx, (_, __, td) in enumerate(ticket_data):
        tktrpc.update(auth_req, idx + 1, "", td)
__test__ = {
'Initialization: Report widgets': r"""
>>> from trac.core import ComponentMeta
>>> from bhdashboard.api import IWidgetProvider
>>> from bhdashboard.widgets.report import *
>>> allcls = ComponentMeta._registry.get(IWidgetProvider, [])
>>> [wpcls in allcls for wpcls in (TicketReportWidget,)]
[True]
""",
'|TicketReport: Metadata': r"""
>>> list(widget.get_widgets())
['TicketReport']
>>> params = widget.get_widget_params('TicketReport')
>>> pprint(params)
{'id': {'desc': 'Report number',
'required': True,
'type': <type 'int'>},
'page': {'default': 1,
'desc': 'Retrieve results in given page.',
'type': <type 'int'>},
'user': {'desc': 'Render the report for a given user.'}}
""",
'|TicketReport: Render My Tickets report': r"""
Add tickets
>>> from tracrpc.ticket import TicketRPC
>>> tcktrpc = TicketRPC(env)
>>> for td in ticket_data :
... tcktrpc.create(auth_req, *td)
...
1
2
3
4
5
6
7
8
9
>>> if all(tcktrpc.get(auth_req, tid)[-1].get('status') == 'new' \
... for tid in xrange(1, 10)):
... # RPC considers ticket workflow
... prepare_ticket_workflow(tcktrpc, ticket_data, auth_req)
...
Check everything is ok with tickets
>>> for tid in xrange(1, 10):
... d = tcktrpc.get(auth_req, tid)[-1]
... print tuple(d.get(attr) or '' for attr in TICKET_ATTRS)
...
(u'Ticket 1', u'Description 1', u'major', u'milestone1',
u'defect', u'murphy', u'accepted', u'component1', u'1.0')
(u'Ticket 2', u'Description 2', u'major', u'milestone4',
u'task', u'murphy', u'accepted', '', '')
(u'Ticket 3', u'Description 3', u'critical', u'milestone3',
u'enhancement', u'tester', u'new', '', u'2.0')
(u'Ticket 4', u'Description 4', u'minor', u'milestone3',
u'task', u'murphy', u'closed', u'component1', u'1.0')
(u'Ticket 5', u'Description 5', u'minor', u'milestone3',
u'task', u'murphy', u'new', '', u'2.0')
(u'Ticket 6', u'Description 6', u'minor', u'milestone1',
u'task', u'tester', u'assigned', u'component2', u'1.0')
(u'Ticket 7', u'Description 7', u'critical', '', u'enhancement',
u'murphy', u'closed', '', '')
(u'Ticket 8', u'Description 8', u'major', '', u'task',
u'murphy', u'closed', u'component1', '')
(u'Ticket 9', u'Description 9', u'minor', '', u'enhancement',
u'tester', u'closed', '', u'2.0')
>>> pprint(widget.render_widget('TicketReport', ctx, {
... 'args' : {'id' : 7}
... }))
...
('widget_grid.html',
{'data': {'action': 'view',
'args': {'USER': 'anonymous'},
'context': <...Context <Resource u'report:7'>>,
'description': u'\nThis report demonstrates the use of the automatically set \nUSER dynamic variable, replaced with the username of the\nlogged in user when executed.\n',
'email_map': {},
'header_groups': [[{'asc': False,
'col': u'__color__',
'hidden': True,
'title': u'Color'},
{'asc': False,
'col': u'__group__',
'hidden': True,
'title': u'Group'},
{'asc': False,
'col': u'ticket',
'hidden': False,
'title': u'Ticket'},
{'asc': False,
'col': u'summary',
'hidden': False,
'title': u'Summary'},
{'asc': False,
'col': u'component',
'hidden': False,
'title': u'Component'},
{'asc': False,
'col': u'version',
'hidden': False,
'title': u'Version'},
{'asc': False,
'col': u'milestone',
'hidden': False,
'title': u'Milestone'},
{'asc': False,
'col': u'type',
'hidden': False,
'title': u'Type'},
{'asc': False,
'col': u'priority',
'hidden': False,
'title': u'Priority'},
{'asc': False,
'col': u'created',
'hidden': False,
'title': u'Created'},
{'asc': False,
'col': u'_changetime',
'hidden': True,
'title': u'Changetime'},
{'asc': False,
'col': u'_description',
'hidden': True,
'title': u'Description'},
{'asc': False,
'col': u'_reporter',
'hidden': True,
'title': u'Reporter'}]],
'message': None,
'numrows': 0,
'paginator': <trac.util.presentation.Paginator object at ...>,
'report': {'id': 7, 'resource': <Resource u'report:7'>},
'row_groups': [],
'sorting_enabled': False,
'title': u'{7} My Tickets'},
'title': <Element "a">},
<...Context <Resource u'report:7'>>)
>>> template, data, rptctx = widget.render_widget('TicketReport', auth_ctx, {
... 'args' : {'id' : 7}
... })
...
>>> data = data['data']
>>> template
'widget_grid.html'
>>> rptctx.parent is auth_ctx
True
In Trac=0.13 (0.12 ?) My Tickets report adds another group
So perform common check in here ...
>>> reported_tickets = None
>>> for x in data['row_groups']:
... if x[0] == 'Reported':
... reported_tickets = x
... break
...
>>> if reported_tickets:
... data['row_groups'].remove(reported_tickets)
...
>>> pprint(data)
{'action': 'view',
'args': {'USER': 'murphy'},
'context': <Context <Resource u'report:7'>>,
'description': u'\nThis report demonstrates the use of the automatically set \nUSER dynamic variable, replaced with the username of the\nlogged in user when executed.\n',
'email_map': {},
'header_groups': [[{'asc': False,
'col': u'__color__',
'hidden': True,
'title': u'Color'},
{'asc': False,
'col': u'__group__',
'hidden': True,
'title': u'Group'},
{'asc': False,
'col': u'ticket',
'hidden': False,
'title': u'Ticket'},
{'asc': False,
'col': u'summary',
'hidden': False,
'title': u'Summary'},
{'asc': False,
'col': u'component',
'hidden': False,
'title': u'Component'},
{'asc': False,
'col': u'version',
'hidden': False,
'title': u'Version'},
{'asc': False,
'col': u'milestone',
'hidden': False,
'title': u'Milestone'},
{'asc': False,
'col': u'type',
'hidden': False,
'title': u'Type'},
{'asc': False,
'col': u'priority',
'hidden': False,
'title': u'Priority'},
{'asc': False,
'col': u'created',
'hidden': False,
'title': u'Created'},
{'asc': False,
'col': u'_changetime',
'hidden': True,
'title': u'Changetime'},
{'asc': False,
'col': u'_description',
'hidden': True,
'title': u'Description'},
{'asc': False,
'col': u'_reporter',
'hidden': True,
'title': u'Reporter'}]],
'message': None,
'numrows': 3,
'paginator': <trac.util.presentation.Paginator object at ...>,
'report': {'id': 7, 'resource': <Resource u'report:7'>},
'row_groups': [(u'Accepted',
[{u'__color__': u'3',
'__idx__': 0,
'cell_groups': [[{'header': {'asc': False,
'col': u'__color__',
'hidden': True,
'title': u'Color'},
'index': 0,
'value': u'3'},
{'header': {'asc': False,
'col': u'__group__',
'hidden': True,
'title': u'Group'},
'index': 1,
'value': u'Accepted'},
{'header': {'asc': False,
'col': u'ticket',
'hidden': False,
'title': u'Ticket'},
'index': 2,
'value': u'1'},
{'header': {'asc': False,
'col': u'summary',
'hidden': False,
'title': u'Summary'},
'index': 3,
'value': u'Ticket 1'},
{'header': {'asc': False,
'col': u'component',
'hidden': False,
'title': u'Component'},
'index': 4,
'value': u'component1'},
{'header': {'asc': False,
'col': u'version',
'hidden': False,
'title': u'Version'},
'index': 5,
'value': u'1.0'},
{'header': {'asc': False,
'col': u'milestone',
'hidden': False,
'title': u'Milestone'},
'index': 6,
'value': u'milestone1'},
{'header': {'asc': False,
'col': u'type',
'hidden': False,
'title': u'Type'},
'index': 7,
'value': u'defect'},
{'header': {'asc': False,
'col': u'priority',
'hidden': False,
'title': u'Priority'},
'index': 8,
'value': u'major'},
{'header': {'asc': False,
'col': u'created',
'hidden': False,
'title': u'Created'},
'index': 9,
'value': u'...'},
{'header': {'asc': False,
'col': u'_changetime',
'hidden': True,
'title': u'Changetime'},
'index': 10,
'value': u'...'},
{'header': {'asc': False,
'col': u'_description',
'hidden': True,
'title': u'Description'},
'index': 11,
'value': u'Description 1'},
{'header': {'asc': False,
'col': u'_reporter',
'hidden': True,
'title': u'Reporter'},
'index': 12,
'value': u'murphy'}]],
'id': u'1',
'resource': <Resource u'ticket:1'>},
{u'__color__': u'3',
'__idx__': 1,
'cell_groups': [[{'header': {'asc': False,
'col': u'__color__',
'hidden': True,
'title': u'Color'},
'index': 0,
'value': u'3'},
{'header': {'asc': False,
'col': u'__group__',
'hidden': True,
'title': u'Group'},
'index': 1,
'value': u'Accepted'},
{'header': {'asc': False,
'col': u'ticket',
'hidden': False,
'title': u'Ticket'},
'index': 2,
'value': u'2'},
{'header': {'asc': False,
'col': u'summary',
'hidden': False,
'title': u'Summary'},
'index': 3,
'value': u'Ticket 2'},
{'header': {'asc': False,
'col': u'component',
'hidden': False,
'title': u'Component'},
'index': 4,
'value': ''},
{'header': {'asc': False,
'col': u'version',
'hidden': False,
'title': u'Version'},
'index': 5,
'value': ''},
{'header': {'asc': False,
'col': u'milestone',
'hidden': False,
'title': u'Milestone'},
'index': 6,
'value': u'milestone4'},
{'header': {'asc': False,
'col': u'type',
'hidden': False,
'title': u'Type'},
'index': 7,
'value': u'task'},
{'header': {'asc': False,
'col': u'priority',
'hidden': False,
'title': u'Priority'},
'index': 8,
'value': u'major'},
{'header': {'asc': False,
'col': u'created',
'hidden': False,
'title': u'Created'},
'index': 9,
'value': u'...'},
{'header': {'asc': False,
'col': u'_changetime',
'hidden': True,
'title': u'Changetime'},
'index': 10,
'value': u'...'},
{'header': {'asc': False,
'col': u'_description',
'hidden': True,
'title': u'Description'},
'index': 11,
'value': u'Description 2'},
{'header': {'asc': False,
'col': u'_reporter',
'hidden': True,
'title': u'Reporter'},
'index': 12,
'value': u'murphy'}]],
'id': u'2',
'resource': <Resource u'ticket:2'>}]),
(u'Owned',
[{u'__color__': u'4',
'__idx__': 2,
'cell_groups': [[{'header': {'asc': False,
'col': u'__color__',
'hidden': True,
'title': u'Color'},
'index': 0,
'value': u'4'},
{'header': {'asc': False,
'col': u'__group__',
'hidden': True,
'title': u'Group'},
'index': 1,
'value': u'Owned'},
{'header': {'asc': False,
'col': u'ticket',
'hidden': False,
'title': u'Ticket'},
'index': 2,
'value': u'5'},
{'header': {'asc': False,
'col': u'summary',
'hidden': False,
'title': u'Summary'},
'index': 3,
'value': u'Ticket 5'},
{'header': {'asc': False,
'col': u'component',
'hidden': False,
'title': u'Component'},
'index': 4,
'value': ''},
{'header': {'asc': False,
'col': u'version',
'hidden': False,
'title': u'Version'},
'index': 5,
'value': u'2.0'},
{'header': {'asc': False,
'col': u'milestone',
'hidden': False,
'title': u'Milestone'},
'index': 6,
'value': u'milestone3'},
{'header': {'asc': False,
'col': u'type',
'hidden': False,
'title': u'Type'},
'index': 7,
'value': u'task'},
{'header': {'asc': False,
'col': u'priority',
'hidden': False,
'title': u'Priority'},
'index': 8,
'value': u'minor'},
{'header': {'asc': False,
'col': u'created',
'hidden': False,
'title': u'Created'},
'index': 9,
'value': u'...'},
{'header': {'asc': False,
'col': u'_changetime',
'hidden': True,
'title': u'Changetime'},
'index': 10,
'value': u'...'},
{'header': {'asc': False,
'col': u'_description',
'hidden': True,
'title': u'Description'},
'index': 11,
'value': u'Description 5'},
{'header': {'asc': False,
'col': u'_reporter',
'hidden': True,
'title': u'Reporter'},
'index': 12,
'value': u'murphy'}]],
'id': u'5',
'resource': <Resource u'ticket:5'>}])],
'sorting_enabled': False,
'title': u'{7} My Tickets'}
... and check for Trac=0.13 in here ;)
>>> if trac_version < trac_tags[0]:
... __tester__.assertEquals(reported_tickets, None)
... else:
... __tester__.assertEquals(
... [x['ticket'] for x in reported_tickets[1]],
... [3, 6]
... )
...
""",
'|TicketReport: Render a subset of My Tickets report': r"""
Add tickets
>>> from tracrpc.ticket import TicketRPC
>>> tcktrpc = TicketRPC(env)
>>> for td in ticket_data :
... tcktrpc.create(auth_req, *td)
...
1
2
3
4
5
6
7
8
9
>>> if all(tcktrpc.get(auth_req, tid)[-1].get('status') == 'new' \
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | true |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/bloodhound_dashboard/bhdashboard/tests/widgets/timeline.py | bloodhound_dashboard/bhdashboard/tests/widgets/timeline.py | # -*- coding: UTF-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import unittest
from bhdashboard.widgets.timeline import TicketFieldTimelineFilter
from trac.test import EnvironmentStub, Mock
from trac.ticket import Ticket
class TicketFieldTimelineFilterTests(unittest.TestCase):
def setUp(self):
self.env = EnvironmentStub()
t1 = self._insert_and_load_ticket("foo")
self.filter = TicketFieldTimelineFilter(self.env)
self.context = Mock(resource=t1.resource)
def tearDown(self):
self.env.reset_db()
def test_returns_none_for_invalid_ticket_id(self):
event = ['ticket', None, None, ['88']]
result = self.filter.filter_event(self.context, None, event, None)
self.assertIsNone(result)
def test_long_resource_id(self):
"""Test resource with long id (#547)"""
resource = self.context.resource
resource.id = long(resource.id)
event = ['ticket', None, None, [resource]]
result = self.filter.filter_event(self.context, None, event, None)
self.assertEqual(result, event)
def _insert_and_load_ticket(self, summary, **kw):
ticket = Ticket(self.env)
ticket["summary"] = summary
for k, v in kw.items():
ticket[k] = v
return Ticket(self.env, ticket.insert())
def suite():
return unittest.makeSuite(TicketFieldTimelineFilterTests)
if __name__ == '__main__':
unittest.main(defaultTest='suite')
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/bloodhound_dashboard/bhdashboard/tests/widgets/__init__.py | bloodhound_dashboard/bhdashboard/tests/widgets/__init__.py | # -*- coding: UTF-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import unittest
from bhdashboard.tests.widgets import timeline
def suite():
suite = unittest.TestSuite()
suite.addTest(timeline.suite())
return suite
if __name__ == '__main__':
unittest.main(defaultTest='suite')
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/bloodhound_dashboard/bhdashboard/widgets/report.py | bloodhound_dashboard/bhdashboard/widgets/report.py | # -*- coding: UTF-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
r"""Project dashboard for Apache(TM) Bloodhound
Widgets displaying report data.
"""
from datetime import datetime, date, time
from itertools import imap, islice
from genshi.builder import tag
from trac.core import implements, TracError
from trac.mimeview.api import Context
from trac.resource import ResourceNotFound
from trac.ticket.report import ReportModule
from trac.web.api import RequestDone
from bhdashboard.util import dummy_request, merge_links, \
pretty_wrapper, trac_version, \
trac_tags
from bhdashboard.util.widgets import WidgetBase, InvalidIdentifier, \
check_widget_name
from bhdashboard.util.translation import _
class TicketReportWidget(WidgetBase):
"""Display tickets in saved report using a grid
"""
def get_widget_params(self, name):
"""Return a dictionary containing arguments specification for
the widget with specified name.
"""
return {
'id' : {
'desc' : """Report number""",
'required' : True,
'type' : int,
},
'page' : {
'default' : 1,
'desc' : """Retrieve results in given page.""",
'type' : int,
},
'user' : {
'desc' : """Render the report for a given user.""",
},
}
get_widget_params = pretty_wrapper(get_widget_params, check_widget_name)
def render_widget(self, name, context, options):
"""Execute stored report and render data using a grid
"""
data = None
req = context.req
try:
params = ('id', 'page', 'user')
rptid, page, user = self.bind_params(name, options, *params)
user = user or req.authname
fakereq = dummy_request(self.env, req.authname)
fakereq.args = {'page' : page, 'user' : user}
del fakereq.redirect # raise RequestDone as usual
rptmdl = self.env[ReportModule]
if rptmdl is None :
raise TracError('Report module not available (disabled?)')
if trac_version < trac_tags[0]:
args = fakereq, self.env.get_db_cnx(), rptid
else:
args = fakereq, rptid
data = rptmdl._render_view(*args)[1]
except ResourceNotFound, exc:
raise InvalidIdentifier(unicode(exc))
except RequestDone:
raise TracError('Cannot execute report. Redirection needed')
except TracError, exc:
if data is not None:
exc.title = data.get('title', 'TracReports')
raise
else:
title = data.get('title', '%s {%s}' % (_('Report'), rptid))
rptctx = Context.from_request(fakereq, 'report', rptid)
return 'widget_grid.html', \
{
'title' : title,
'data' : data,
'ctxtnav' : [
tag.a(_('More'), href=req.href('report', rptid)),
('REPORT_MODIFY' in req.perm(rptctx.resource)) and \
tag.a(_('Edit'), href=req.href('report', rptid, action='edit')) or None,
],
'altlinks' : fakereq.chrome.get('links', {}).get('alternate')
}, \
rptctx
render_widget = pretty_wrapper(render_widget, check_widget_name)
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/bloodhound_dashboard/bhdashboard/widgets/timeline.py | bloodhound_dashboard/bhdashboard/widgets/timeline.py | # -*- coding: UTF-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
r"""Project dashboard for Apache(TM) Bloodhound
Widgets displaying timeline data.
"""
from datetime import datetime, date, time, timedelta
from itertools import imap, islice
from types import MethodType
from genshi.builder import tag
from trac.core import Component, ExtensionPoint, implements, Interface, \
TracError
from trac.config import IntOption
from trac.mimeview.api import RenderingContext
from trac.resource import Resource, resource_exists, ResourceNotFound
from trac.timeline.web_ui import TimelineModule
from trac.ticket.api import TicketSystem
from trac.ticket.model import Ticket
from trac.ticket.web_ui import TicketModule
from trac.util.datefmt import utc
from trac.web.chrome import add_stylesheet
from bhdashboard.api import DateField, EnumField, ListField
from bhdashboard.util import dummy_request, merge_links, pretty_wrapper, \
trac_version, trac_tags
from bhdashboard.util.widgets import WidgetBase, InvalidIdentifier, \
check_widget_name
from bhdashboard.util.translation import _, tag_
__metaclass__ = type
class ITimelineEventsFilter(Interface):
"""Filter timeline events displayed in a rendering context
"""
def supported_providers():
"""List supported timeline providers. Filtering process will take
place only for the events contributed by listed providers.
Return `None` and all events contributed by all timeline providers
will be processed.
"""
def filter_event(context, provider, event, filters):
"""Decide whether a timeline event is relevant in a rendering context.
:param context: rendering context, used to determine events scope
:param provider: provider contributing event
:param event: target event
:param filters: active timeline filters
:return: the event resulting from the filtering process or
`None` if it has to be removed from the event stream or
`NotImplemented` if the filter doesn't care about it.
"""
class TimelineWidget(WidgetBase):
"""Display activity feed.
"""
default_count = IntOption('widget_activity', 'limit', 25,
"""Maximum number of items displayed by default""",
doc_domain='bhdashboard')
event_filters = ExtensionPoint(ITimelineEventsFilter)
_filters_map = None
@property
def filters_map(self):
"""Quick access to timeline events filters to be applied for a
given timeline provider.
"""
if self._filters_map is None:
self._filters_map = {}
for _filter in self.event_filters:
providers = _filter.supported_providers()
if providers is None:
providers = [None]
for p in providers:
self._filters_map.setdefault(p, []).append(_filter)
return self._filters_map
def get_widget_params(self, name):
"""Return a dictionary containing arguments specification for
the widget with specified name.
"""
return {
'from': {
'desc': """Display events before this date""",
'type': DateField(), # TODO: Custom datetime format
},
'daysback': {
'desc': """Event time window""",
'type': int,
},
'precision': {
'desc': """Time precision""",
'type': EnumField('second', 'minute', 'hour')
},
'doneby': {
'desc': """Filter events related to user""",
},
'filters': {
'desc': """Event filters""",
'type': ListField()
},
'max': {
'desc': """Limit the number of events displayed""",
'type': int
},
'realm': {
'desc': """Resource realm. Used to filter events""",
},
'id': {
'desc': """Resource ID. Used to filter events""",
},
}
get_widget_params = pretty_wrapper(get_widget_params, check_widget_name)
def render_widget(self, name, context, options):
"""Gather timeline events and render data in compact view
"""
data = None
req = context.req
try:
timemdl = self.env[TimelineModule]
admin_page = tag.a(_("administration page."),
title=_("Plugin Administration Page"),
href=req.href.admin('general/plugin'))
if timemdl is None:
return 'widget_alert.html', {
'title': _("Activity"),
'data': {
'msglabel': _("Warning"),
'msgbody':
tag_("The TimelineWidget is disabled because the "
"Timeline component is not available. "
"Is the component disabled? "
"You can enable from the %(page)s",
page=admin_page),
'dismiss': False,
}
}, context
params = ('from', 'daysback', 'doneby', 'precision', 'filters',
'max', 'realm', 'id')
start, days, user, precision, filters, count, realm, rid = \
self.bind_params(name, options, *params)
if context.resource.realm == 'ticket':
if days is None:
# calculate a long enough time daysback
ticket = Ticket(self.env, context.resource.id)
ticket_age = datetime.now(utc) - ticket.time_created
days = ticket_age.days + 1
if count is None:
# ignore short count for ticket feeds
count = 0
if count is None:
count = self.default_count
fakereq = dummy_request(self.env, req.authname)
fakereq.args = {
'author': user or '',
'daysback': days or '',
'max': count,
'precision': precision,
'user': user
}
if filters:
fakereq.args.update(dict((k, True) for k in filters))
if start is not None:
fakereq.args['from'] = start.strftime('%x %X')
wcontext = context.child()
if (realm, rid) != (None, None):
# Override rendering context
resource = Resource(realm, rid)
if resource_exists(self.env, resource) or \
realm == rid == '':
wcontext = context.child(resource)
wcontext.req = req
else:
self.log.warning("TimelineWidget: Resource %s not found",
resource)
# FIXME: Filter also if existence check is not conclusive ?
if resource_exists(self.env, wcontext.resource):
module = FilteredTimeline(self.env, wcontext)
self.log.debug('Filtering timeline events for %s',
wcontext.resource)
else:
module = timemdl
data = module.process_request(fakereq)[1]
except TracError, exc:
if data is not None:
exc.title = data.get('title', _('Activity'))
raise
else:
merge_links(srcreq=fakereq, dstreq=req,
exclude=["stylesheet", "alternate"])
if 'context' in data:
# Needed for abbreviated messages in widget events (#340)
wcontext.set_hints(**(data['context']._hints or {}))
data['context'] = wcontext
return 'widget_timeline.html', {
'title': _('Activity'),
'data': data,
'altlinks': fakereq.chrome.get('links', {}).get('alternate')
}, context
render_widget = pretty_wrapper(render_widget, check_widget_name)
class FilteredTimeline:
"""This is a class (not a component ;) aimed at overriding some parts of
TimelineModule without patching it in order to inject code needed to filter
timeline events according to rendering context. It acts as a wrapper on top
of TimelineModule.
"""
def __init__(self, env, context, keep_mismatched=False):
"""Initialization
:param env: Environment object
:param context: Rendering context
"""
self.env = env
self.context = context
self.keep_mismatched = keep_mismatched
# Access to TimelineModule's members
process_request = TimelineModule.__dict__['process_request']
_provider_failure = TimelineModule.__dict__['_provider_failure']
_event_data = TimelineModule.__dict__['_event_data']
_max_daysback = TimelineModule.max_daysback
@property
def max_daysback(self):
return (-1 if self.context.resource.realm == 'ticket'
else self._max_daysback)
@property
def event_providers(self):
"""Introduce wrappers around timeline event providers in order to
filter event streams.
"""
for p in TimelineModule(self.env).event_providers:
yield TimelineFilterAdapter(p, self.context, self.keep_mismatched)
def __getattr__(self, attrnm):
"""Forward attribute access request to TimelineModule
"""
try:
value = getattr(TimelineModule(self.env), attrnm)
if isinstance(value, MethodType):
raise AttributeError()
except AttributeError:
raise AttributeError("'%s' object has no attribute '%s'"
% (self.__class__.__name__, attrnm))
else:
return value
class TimelineFilterAdapter:
"""Wrapper class used to filter timeline event streams transparently.
Therefore it is compatible with `ITimelineEventProvider` interface
and reuses the implementation provided by real provider.
"""
def __init__(self, provider, context, keep_mismatched=False):
"""Initialize wrapper object by providing real timeline events provider.
"""
self.provider = provider
self.context = context
self.keep_mismatched = keep_mismatched
# ITimelineEventProvider methods
def get_timeline_filters(self, req):
gen = self.provider.get_timeline_filters(req)
if self.context.resource.realm == 'ticket' and \
isinstance(self.provider, TicketModule) and \
'TICKET_VIEW' in req.perm:
# ensure ticket_details appears once if this is a query on a ticket
gen = list(gen)
if not [g for g in gen if g[0] == 'ticket_details']:
gen.append(('ticket_details', _("Ticket updates"), False))
return gen
#def render_timeline_event(self, context, field, event):
def get_timeline_events(self, req, start, stop, filters):
"""Filter timeline events according to context.
"""
filters_map = TimelineWidget(self.env).filters_map
evfilters = filters_map.get(self.provider.__class__.__name__, []) + \
filters_map.get(None, [])
self.log.debug('Applying filters %s for %s against %s', evfilters,
self.context.resource, self.provider)
if evfilters:
for event in self.provider.get_timeline_events(
req, start, stop, filters):
match = False
for f in evfilters:
new_event = f.filter_event(self.context, self.provider,
event, filters)
if new_event is None:
event = None
match = True
break
elif new_event is NotImplemented:
pass
else:
event = new_event
match = True
if event is not None and (match or self.keep_mismatched):
yield event
else:
if self.keep_mismatched:
for event in self.provider.get_timeline_events(
req, start, stop, filters):
yield event
def __getattr__(self, attrnm):
"""Forward attribute access request to real provider
"""
try:
value = getattr(self.provider, attrnm)
except AttributeError:
raise AttributeError("'%s' object has no attribute '%s'"
% (self.__class__.__name__, attrnm))
else:
return value
class TicketFieldTimelineFilter(Component):
"""A class filtering ticket events related to a given resource
associated via ticket fields.
"""
implements(ITimelineEventsFilter)
@property
def fields(self):
"""Available ticket fields
"""
field_names = getattr(self, '_fields', None)
if field_names is None:
self._fields = set(f['name'] for f in
TicketSystem(self.env).get_ticket_fields())
return self._fields
# ITimelineEventsFilter methods
def supported_providers(self):
"""This filter will work on ticket events. It also intercepts events
even when multi-product ticket module is installed.
"""
yield 'TicketModule'
yield 'ProductTicketModule'
def filter_event(self, context, provider, event, filters):
"""Decide whether the target of a ticket event has a particular custom
field set to the context resource's identifier.
"""
if context.resource is not None:
field_name = context.resource.realm
if field_name in self.fields.union(['ticket']):
try:
ticket_ids = event[3][0]
except:
self.log.exception('Unknown ticket event %s ... [SKIP]',
event)
return None
if not isinstance(ticket_ids, list):
ticket_ids = [ticket_ids]
context._ticket_cache = ticket_cache = \
getattr(context, '_ticket_cache', None) or {}
for t in ticket_ids:
if isinstance(t, Resource):
if event[0] != 'attachment':
t = t.id
else:
t = t.parent.id
try:
t = ticket_cache.get(t) or Ticket(self.env, t)
except ResourceNotFound:
return None
if field_name == 'ticket' and t.id == context.resource.id:
return event
if t[field_name] == context.resource.id:
return event
ticket_cache[t.id] = t
else:
return None
return NotImplemented
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/bloodhound_dashboard/bhdashboard/widgets/query.py | bloodhound_dashboard/bhdashboard/widgets/query.py | # -*- coding: UTF-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
r"""Project dashboard for Apache(TM) Bloodhound
Widgets displaying report data.
"""
from cgi import parse_qs
from datetime import datetime, date, time
from itertools import count, imap, islice
from genshi.builder import tag
from trac.core import implements, TracError
from trac.mimeview.api import Context
from trac.resource import Resource, ResourceNotFound
from trac.ticket.query import Query, QueryModule
from trac.web.api import RequestDone
from bhdashboard.util import dummy_request, merge_links, \
pretty_wrapper, trac_version, \
trac_tags
from bhdashboard.util.widgets import WidgetBase, InvalidIdentifier, \
check_widget_name
from bhdashboard.util.translation import _
from multiproduct.env import ProductEnvironment
class TicketQueryWidget(WidgetBase):
"""Display tickets matching a TracQuery using a grid
"""
def get_widget_params(self, name):
"""Return a dictionary containing arguments specification for
the widget with specified name.
"""
return {
'query' : {
'desc' : """Query string""",
'required' : True,
},
'max' : {
'default' : 0,
'desc' : """Limit the number of results displayed""",
'type' : int,
},
'page' : {
'desc' : """Page number""",
'type' : int,
},
'title' : {
'desc' : """Widget title""",
},
}
get_widget_params = pretty_wrapper(get_widget_params, check_widget_name)
def render_widget(self, name, context, options):
"""Execute custom query and render data using a grid
"""
data = None
req = context.req
try:
params = ('query', 'max', 'page', 'title')
qstr, maxrows, page, title = self.bind_params(name, options, *params)
fakereq = dummy_request(self.env, req.authname)
fakereq.args = args = parse_qs(qstr)
fakereq.arg_list = []
for k,v in args.items():
# Patch for 0.13
fakereq.arg_list.extend((k, _v) for _v in v)
try:
if len(v) == 1:
args[k] = v[0]
except TypeError:
pass
more_link_href = req.href('query', args)
args.update({'page' : page, 'max': maxrows})
qrymdl = self.env[QueryModule]
if qrymdl is None :
raise TracError('Query module not available (disabled?)')
data = qrymdl.process_request(fakereq, self.env)[1]
except TracError, exc:
if data is not None:
exc.title = data.get('title', 'TracQuery')
raise
else:
qryctx = Context.from_request(fakereq)
query = data['query']
idxs = count()
headers = [dict(title=h['label'], col=h['name'], hidden=False,
asc=h['name'] == query.order and not query.desc) \
for h in data['headers']]
data.update(
dict(header_groups=[headers],
numrows=len(data['tickets']),
row_groups=[(group_value,
[{
'__color__' : t['priority_value'],
'__idx__' : idxs.next(),
'cell_groups' : [[
{
'header' : h,
'index' : hidx,
'value' : t[h['col']]
} \
for hidx, h in enumerate(headers)]],
'id' : t['id'],
'resource' : Resource('ticket', t['id']),
'href': t['href']
} for t in tickets]) \
for group_value, tickets in data['groups'] ]))
return 'widget_grid.html', \
{
'title' : title or _('Custom Query'),
'data' : data,
'ctxtnav' : [
tag.a(_('More'),
href=more_link_href)],
'altlinks' : fakereq.chrome.get('links', {}).get('alternate')
}, \
qryctx
render_widget = pretty_wrapper(render_widget, check_widget_name)
#--------------------------------------
# Query functions and methods
#--------------------------------------
def exec_query(env, req, qstr='status!=closed'):
""" Perform a ticket query, returning a list of ticket ID's.
"""
return Query.from_string(env, qstr).execute(req)
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/bloodhound_dashboard/bhdashboard/widgets/ticket.py | bloodhound_dashboard/bhdashboard/widgets/ticket.py | # -*- coding: UTF-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
r"""Project dashboard for Apache(TM) Bloodhound
Widgets displaying ticket data.
"""
from itertools import imap, islice
from urllib import urlencode
from genshi.builder import tag
from genshi.core import Markup
from trac.core import implements, TracError
from trac.ticket.api import TicketSystem
from trac.ticket.query import Query
from trac.ticket.roadmap import apply_ticket_permissions, get_ticket_stats, \
ITicketGroupStatsProvider, RoadmapModule
from trac.util.text import unicode_urlencode
from trac.web.chrome import add_stylesheet
from bhdashboard.api import DateField, EnumField, InvalidWidgetArgument, \
ListField
from bhdashboard.widgets.query import exec_query
from bhdashboard.util import dummy_request, merge_links, minmax, \
pretty_wrapper, resolve_ep_class, \
trac_version, trac_tags
from bhdashboard.util.widgets import WidgetBase, check_widget_name
from bhdashboard.util.translation import _
from multiproduct.env import Product, ProductEnvironment
class TicketFieldValuesWidget(WidgetBase):
    """Display a tag cloud representing frequency of values assigned to
    ticket fields.
    """
    # Fields whose items may link straight to a dedicated resource page
    # (href path components prepended to the item value) instead of a
    # custom query URL.  See dash_item_link in render_widget.
    DASH_ITEM_HREF_MAP = {'milestone': ('milestone',),
                          }
    def get_widget_params(self, name):
        """Return a dictionary containing arguments specification for
        the widget with specified name.
        """
        return {
                'field' : {
                        'desc' : """Target ticket field. """
                            """Required if no group in `query`.""",
                    },
                'query' : {
                        'desc' : """TracQuery used to filter target tickets.""",
                    },
                'title' : {
                        'desc' : """Widget title""",
                    },
                'verbose' : {
                        'desc' : """Show frequency next to each value""",
                        'default' : False,
                        'type' : bool,
                    },
                'threshold' : {
                        'desc' : """Filter items having smaller frequency""",
                        'type' : int,
                    },
                'max' : {
                        'default' : 0,
                        'desc' : """Limit the number of items displayed""",
                        'type' : int
                    },
                'view' : {
                        'desc' : """Display mode. Should be one of the following
                            - `list` : Unordered value list (default)
                            - `cloud` : Similar to tag cloud
                            """,
                        'default' : 'list',
                        'type' : EnumField('list', 'cloud', 'table', 'compact'),
                    },
            }
    get_widget_params = pretty_wrapper(get_widget_params, check_widget_name)
    def render_widget(self, name, context, options):
        """Count occurrences of values assigned to given ticket field.

        Returns a (template, data, context) tuple rendering either the
        value list / cloud, or an informational alert when the target
        field exists in field_maps but has no values defined.
        """
        req = context.req
        params = ('field', 'query', 'verbose', 'threshold', 'max', 'title',
                'view')
        fieldnm, query, verbose, threshold, maxitems, title, view = \
                self.bind_params(name, options, *params)
        # NOTE(review): `threshold` and `maxitems` are bound but not yet
        # applied to the result set (see TODO below).
        # Built-in ticket fields: admin panel URL suffix (None means the
        # field is not manageable via the admin UI) plus localized title
        # used for the "no values defined" alert.
        field_maps = {'type': {'admin_url': 'type',
                               'title': _('Types'),
                               },
                      'status': {'admin_url': None,
                                 'title': _('Statuses'),
                                 },
                      'priority': {'admin_url': 'priority',
                                   'title': _('Priorities'),
                                   },
                      'milestone': {'admin_url': 'milestones',
                                    'title': _('Milestones'),
                                    },
                      'component': {'admin_url': 'components',
                                    'title': _('Components'),
                                    },
                      'version': {'admin_url': 'versions',
                                  'title': _('Versions'),
                                  },
                      'severity': {'admin_url': 'severity',
                                   'title': _('Severities'),
                                   },
                      'resolution': {'admin_url': 'resolution',
                                     'title': _('Resolutions'),
                                     },
                      }
        # Result holder written by the closure below; a list is used so the
        # closure can mutate it without a `nonlocal` (Python 2).
        _field = []
        def check_field_name():
            # Validate `fieldnm` against the ticket system's fields.  On a
            # match the field definition is stored in _field and None is
            # returned; a known-but-empty field yields a rendered alert
            # tuple; an unknown field raises InvalidWidgetArgument.
            if fieldnm is None:
                raise InvalidWidgetArgument('field', 'Missing ticket field')
            tsys = self.env[TicketSystem]
            if tsys is None:
                raise TracError(_('Error loading ticket system (disabled?)'))
            for field in tsys.get_ticket_fields():
                if field['name'] == fieldnm:
                    _field.append(field)
                    break
            else:
                if fieldnm in field_maps:
                    admin_suffix = field_maps.get(fieldnm)['admin_url']
                    if 'TICKET_ADMIN' in req.perm and admin_suffix is not None:
                        hint = _('You can add one or more '
                                 '<a href="%(url)s">here</a>.',
                                 url=req.href.admin('ticket', admin_suffix))
                    else:
                        hint = _('Contact your administrator for further details')
                    return 'widget_alert.html', \
                            {
                                'title' : Markup(field_maps[fieldnm]['title']),
                                'data' : dict(msgtype='info',
                                    msglabel="Note",
                                    msgbody=Markup(_('''No values are
                                        defined for ticket field
                                        <em>%(field)s</em>. %(hint)s''',
                                        field=fieldnm, hint=hint))
                                    )
                            }, context
                else:
                    raise InvalidWidgetArgument('field',
                            'Unknown ticket field %s' % (fieldnm,))
            return None
        if query is None :
            # No query supplied: count values straight from the ticket
            # (or ticket_custom) table.
            data = check_field_name()
            if data is not None:
                return data
            field = _field[0]
            if field.get('custom'):
                sql = "SELECT COALESCE(value, ''), count(COALESCE(value, ''))" \
                        " FROM ticket_custom " \
                        " WHERE name='%(name)s' GROUP BY COALESCE(value, '')"
            else:
                sql = "SELECT COALESCE(%(name)s, ''), " \
                        "count(COALESCE(%(name)s, '')) FROM ticket " \
                        "GROUP BY COALESCE(%(name)s, '')"
            sql = sql % field
            # TODO : Implement threshold and max
            # Product environments scope db_query to the product's
            # tickets; otherwise query the global tables directly.
            db_query = req.perm.env.db_query \
                    if isinstance(req.perm.env, ProductEnvironment) \
                    else req.perm.env.db_direct_query
            with db_query as db:
                cursor = db.cursor()
                cursor.execute(sql)
                items = cursor.fetchall()
            QUERY_COLS = ['id', 'summary', 'owner', 'type', 'status', 'priority']
            item_link= lambda item: req.href.query(col=QUERY_COLS + [fieldnm],
                    **{fieldnm:item[0]})
        else:
            # Query supplied: group it by the target field and count rows
            # of the generated SQL through a wrapping SELECT.
            query = Query.from_string(self.env, query, group=fieldnm)
            if query.group is None:
                data = check_field_name()
                if data is not None:
                    return data
                raise InvalidWidgetArgument('field',
                        'Invalid ticket field for ticket groups')
            fieldnm = query.group
            sql, v = query.get_sql()
            sql = "SELECT COALESCE(%(name)s, '') , count(COALESCE(%(name)s, ''))"\
                    "FROM (%(sql)s) AS foo GROUP BY COALESCE(%(name)s, '')" % \
                    { 'name' : fieldnm, 'sql' : sql }
            # NOTE(review): get_db_cnx is the legacy (pre-Trac 1.0) DB
            # API; the query-less branch above already uses db_query.
            db = self.env.get_db_cnx()
            try :
                cursor = db.cursor()
                cursor.execute(sql, v)
                items = cursor.fetchall()
            finally:
                cursor.close()
            query_href = query.get_href(req.href)
            item_link= lambda item: query_href + \
                    '&' + unicode_urlencode([(fieldnm, item[0])])
        if fieldnm in self.DASH_ITEM_HREF_MAP:
            # Mapped fields (e.g. milestone) link each non-empty value to
            # its resource page; empty values fall back to the query URL.
            def dash_item_link(item):
                if item[0]:
                    args = self.DASH_ITEM_HREF_MAP[fieldnm] + (item[0],)
                    return req.href(*args)
                else:
                    return item_link(item)
        else:
            dash_item_link = item_link
        if title is None:
            heading = _(fieldnm.capitalize())
        else:
            heading = None
        return 'widget_cloud.html', \
                {
                    'title' : title,
                    'data' : dict(
                            bounds=minmax(items, lambda x: x[1]),
                            item_link=dash_item_link,
                            heading=heading,
                            items=items,
                            verbose=verbose,
                            view=view,
                        ),
                }, \
                context
    render_widget = pretty_wrapper(render_widget, check_widget_name)
class TicketGroupStatsWidget(WidgetBase):
    """Render a progress bar illustrating statistics gathered on a
    group of tickets.
    """
    def get_widget_params(self, name):
        """Return a dictionary containing arguments specification for
        the widget with specified name.
        """
        skin_type = EnumField('info', 'success', 'warning', 'danger',
                              'info-stripped', 'success-stripped',
                              'warning-stripped', 'danger-stripped')
        view_type = EnumField('compact', 'standard')
        spec = {}
        spec['query'] = {
            'default': 'status!=closed',
            'desc': """Query string""",
        }
        spec['stats_provider'] = {
            'desc': """Name of the component implementing
        `ITicketGroupStatsProvider`, which is used to collect statistics
        on groups of tickets.""",
            'default': 'DefaultTicketGroupStatsProvider',
        }
        spec['skin'] = {
            'desc': """Look and feel of the progress bar""",
            'type': skin_type,
        }
        spec['title'] = {
            'desc': """Widget title""",
        }
        spec['legend'] = {
            'desc': """Text on top of the progress bar""",
        }
        spec['desc'] = {
            'desc': """Descriptive (wiki) text""",
        }
        spec['view'] = {
            'desc': """Display mode to render progress info""",
            'type': view_type,
        }
        return spec
    get_widget_params = pretty_wrapper(get_widget_params, check_widget_name)
    def render_widget(self, name, context, options):
        """Collect ticket statistics and prepare progress bar data.
        """
        req = context.req
        bound = self.bind_params(name, options, 'query', 'stats_provider',
                                 'skin', 'title', 'legend', 'desc', 'view')
        query_str, provider_name, skin, title, legend, desc, view = bound
        # Resolve the stats provider, defaulting to the roadmap's one.
        provider = resolve_ep_class(
            ITicketGroupStatsProvider, self, provider_name,
            default=RoadmapModule(self.env).stats_provider)
        if skin is not None :
            # e.g. 'success-stripped' -> ['success', 'stripped']
            skin = (skin or '').split('-', 2)
        tickets = exec_query(self.env, req, query_str)
        tickets = apply_ticket_permissions(self.env, req, tickets)
        stats = get_ticket_stats(provider, tickets)
        for stylesheet in ('dashboard/css/bootstrap.css',
                           'dashboard/css/bootstrap-responsive.css',
                           'dashboard/css/roadmap.css'):
            add_stylesheet(req, stylesheet)
        payload = dict(desc=desc, legend=legend, bar_styles=skin,
                       stats=stats, view=view)
        return 'widget_progress.html', \
               {'title': title, 'data': payload}, \
               context
    render_widget = pretty_wrapper(render_widget, check_widget_name)
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/bloodhound_dashboard/bhdashboard/widgets/containers.py | bloodhound_dashboard/bhdashboard/widgets/containers.py | # -*- coding: UTF-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
r"""Project dashboard for Apache(TM) Bloodhound
Widgets acting as containers.
"""
from genshi.builder import tag
from trac.core import implements, TracError
from bhdashboard.api import DashboardSystem, InvalidWidgetArgument, JsonField
from bhdashboard.util import dummy_request, merge_links, minmax, \
pretty_wrapper, trac_version, trac_tags
from bhdashboard.util.widgets import WidgetBase, check_widget_name
from bhdashboard.web_ui import DashboardModule
class ContainerWidget(WidgetBase):
    """Embed widgets positioned according to the rules defined by a layout.
    """
    def get_widget_params(self, name):
        """Return a dictionary containing arguments specification for
        the widget with specified name.
        """
        return {
            'layout': {
                'desc': """Name of layout used to arrange widgets""",
                'required': True,
            },
            'schema': {
                'desc': """Widgets and position (in JSON)""",
                'required': True,
                'type': JsonField()
            },
            'show_captions': {
                'desc': """Show widget titles""",
                'default': False,
            },
            'title': {
                'desc': """User-defined title""",
            },
        }
    get_widget_params = pretty_wrapper(get_widget_params, check_widget_name)
    def render_widget(self, name, context, options):
        """Expand the configured layout and render the embedded widgets.
        """
        layout, schema, show_captions, title = self.bind_params(
            name, options, 'layout', 'schema', 'show_captions', 'title')
        layout_provider = DashboardSystem(self.env).resolve_layout(layout)
        dashboard = DashboardModule(self.env)
        # Expand the layout in embedded mode so it renders inside the
        # container rather than as a full page.
        expanded = layout_provider.expand_layout(
            layout, context, {'schema': schema, 'embed': True})
        widgets = dashboard.expand_widget_data(context, schema)
        payload = dict(
            context=context,
            layout=schema,
            widgets=widgets,
            title='',
            default={'height': dashboard.default_widget_height or None})
        return expanded['template'], \
               {'title': title, 'data': payload}, \
               context
    render_widget = pretty_wrapper(render_widget, check_widget_name)
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/bloodhound_dashboard/bhdashboard/widgets/__init__.py | bloodhound_dashboard/bhdashboard/widgets/__init__.py | # -*- coding: UTF-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
r"""Project dashboard for Apache(TM) Bloodhound
Available widgets.
"""
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/bloodhound_dashboard/bhdashboard/widgets/product.py | bloodhound_dashboard/bhdashboard/widgets/product.py | # -*- coding: UTF-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
r"""Project dashboard for Apache(TM) Bloodhound
Widgets displaying product information (multiproduct).
"""
import itertools
from genshi.builder import tag
from trac.resource import Neighborhood
from trac.ticket.model import Milestone, Component, Version
from trac.ticket.query import Query
from bhdashboard.util import pretty_wrapper
from bhdashboard.util.widgets import WidgetBase, check_widget_name
from bhdashboard.util.translation import _
from multiproduct.env import Product, ProductEnvironment
__metaclass__ = type  # Python 2: make classes defined here new-style by default
class ProductWidget(WidgetBase):
    """Display products available to the user.

    For each product the widget lists its milestones, components and
    versions together with open-ticket counts and ready-made query links.
    """
    def get_widget_params(self, name):
        """Return a dictionary containing arguments specification for
        the widget with specified name.
        """
        return {
            'max': {'desc': """Limit the number of products displayed""",
                    'type': int},
            'cols': {'desc': """Number of columns""",
                     'type': int}
        }
    get_widget_params = pretty_wrapper(get_widget_params, check_widget_name)
    COMMON_QUERY = 'order=priority&status=!closed&col=id&col=summary' \
                   '&col=owner&col=type&col=status&col=priority&col=product'
    def _get_product_info(self, product, href, resource, max_):
        """Return up to `max_` resource items of `product`, each annotated
        with an open-ticket count (`ticket_count`) and a target URL (`url`).

        :param product: `Product` whose resources are gathered
        :param href: href builder for the product environment
        :param resource: dict with `type` (model class), `name` (ticket
                         field name) and optional `hrefurl` flag
        :param max_: maximum number of items to include
        """
        penv = ProductEnvironment(self.env, product.prefix)
        results = []
        # Materialize the selection once: some model select() methods
        # return generators, and iterating them twice (once to slice,
        # once to count) would otherwise under-count the total.
        items = list(resource['type'].select(penv))
        for q in items[:max_]:
            q.url = href(resource['name'], q.name) \
                if resource.get('hrefurl') \
                else Query.from_string(penv,
                    '%s=%s&%s&col=%s' % (resource['name'], q.name,
                        self.COMMON_QUERY, resource['name'])
                    ).get_href(href)
            # The item name is passed as a bound parameter: resource
            # names (e.g. milestones) may contain quotes, which would
            # break - or inject into - an interpolated SQL literal.
            # resource['name'] itself comes from the fixed dicts in
            # render_widget, so interpolating it as an identifier is safe.
            q.ticket_count = penv.db_query(
                "SELECT COUNT(*) FROM ticket WHERE ticket.%s=%%s"
                " AND ticket.status <> 'closed'" % (resource['name'],),
                (q.name,))[0][0]
            results.append(q)
        # add a '(No <milestone/component/version>)' entry if there are
        # tickets without an assigned resource in the product
        ticket_count = penv.db_query(
            """SELECT COUNT(*) FROM ticket WHERE %s=''
               AND status <> 'closed'""" % (resource['name'],))[0][0]
        if ticket_count != 0:
            q = resource['type'](penv)
            q.name = '(No %s)' % (resource['name'],)
            q.url = Query.from_string(penv,
                'status=!closed&col=id&col=summary&col=owner'
                '&col=status&col=priority&order=priority&%s='
                % (resource['name'],)
                ).get_href(href)
            q.ticket_count = ticket_count
            results.append(q)
        results.sort(key=lambda x: x.ticket_count, reverse=True)
        # add a link to the resource list if there are
        # more than max resources defined
        if len(items) > max_:
            q = resource['type'](penv)
            q.name = _('... more')
            q.ticket_count = None
            q.url = href(resource['name']) if resource.get('hrefurl') \
                else href.dashboard()
            results.append(q)
        return results
    def render_widget(self, name, context, options):
        """Gather product list and render data in compact view
        """
        data = {}
        req = context.req
        title = ''
        params = ('max', 'cols')
        max_, cols = self.bind_params(name, options, *params)
        # Products are only listed from the global environment; inside a
        # product environment the widget renders empty.
        if not isinstance(self.env, ProductEnvironment):
            for p in Product.select(self.env):
                if 'PRODUCT_VIEW' in req.perm(Neighborhood('product',
                                                           p.prefix)):
                    penv = ProductEnvironment(self.env, p.prefix)
                    phref = ProductEnvironment.resolve_href(penv, self.env)
                    for resource in (
                        {'type': Milestone, 'name': 'milestone',
                         'hrefurl': True},
                        {'type': Component, 'name': 'component'},
                        {'type': Version, 'name': 'version'},
                    ):
                        setattr(p, resource['name'] + 's',
                                self._get_product_info(p, phref, resource,
                                                       max_))
                    p.owner_link = Query.from_string(self.env,
                        'status!=closed&col=id&col=summary&col=owner'
                        '&col=status&col=priority&order=priority'
                        '&group=product&owner=%s' % (p._data['owner'] or '', )
                        ).get_href(phref)
                    p.href = phref()
                    data.setdefault('product_list', []).append(p)
            title = _('Products')
        # Column sequence consumed by the template to break rows.
        data['colseq'] = itertools.cycle(xrange(cols - 1, -1, -1)) if cols \
            else itertools.repeat(1)
        return 'widget_product.html', {
            'title': title,
            'data': data,
            'ctxtnav': [tag.a(_('More'), href=req.href('products'))],
        }, context
    render_widget = pretty_wrapper(render_widget, check_widget_name)
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/bloodhound_dashboard/bhdashboard/layouts/__init__.py | bloodhound_dashboard/bhdashboard/layouts/__init__.py | # -*- coding: UTF-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
r"""Project dashboard for Apache(TM) Bloodhound
Available layouts.
"""
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/bloodhound_dashboard/bhdashboard/layouts/bootstrap.py | bloodhound_dashboard/bhdashboard/layouts/bootstrap.py | # -*- coding: UTF-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
r"""Project dashboard for Apache(TM) Bloodhound
Widgets displaying ticket data.
"""
from uuid import uuid4
from trac.core import Component, implements, TracError
from trac.web.chrome import add_stylesheet, add_script
from bhdashboard.api import ILayoutProvider
class BootstrapLayout(Component):
    """Implement some basic bootstrap layouts
    """
    implements(ILayoutProvider)
    # ILayoutProvider methods
    def get_layouts(self):
        """Supported layouts.
        """
        for layout_name in ('bootstrap_grid', 'bootstrap_btnbar'):
            yield layout_name
    def get_layout_description(self, name):
        """Return plain text description of the layout with specified name.
        """
        descriptions = {
            'bootstrap_grid': "Bootstrap grid system "
                "http://twitter.github.com/bootstrap/scaffolding.html#layouts",
            'bootstrap_btnbar': "Button toolbar acting as tabs nav",
        }
        return descriptions[name]
    def expand_layout(self, name, context, options):
        """Specify bootstrap layout template
        """
        req = context.req
        add_stylesheet(req, 'dashboard/css/bootstrap.css')
        add_stylesheet(req, 'dashboard/css/bootstrap-responsive.css')
        if name == 'bootstrap_btnbar':
            self._process_btnbar(req, options)
        # (full-page template, embedded template) per layout; the boolean
        # `embed` flag indexes the pair.
        templates = {
            'bootstrap_grid': ('bs_grid_full.html', 'bs_grid.html'),
            'bootstrap_btnbar': ('bs_btnbar_full.html', 'bs_btnbar.html'),
        }
        embed = bool(options.get('embed'))
        return {'template': templates[name][1 if embed else 0]}
    # Internal methods
    def _process_btnbar(self, req, options):
        """Determine toolbar groups
        """
        layout_data = options['schema']
        if layout_data.get('ready'):
            return
        original_toolbar = layout_data.get('toolbar', [])
        active = layout_data.get('active')
        groups = [[]]
        layout_data['toolbar'] = groups
        for position, (caption, widget_idx) in enumerate(original_toolbar):
            if caption == '|':
                # '|' acts as a group separator in the toolbar spec
                groups.append([])
            else:
                groups[-1].append({'caption': caption,
                                   'widget': widget_idx,
                                   'id': uuid4().hex,
                                   'active': position == active})
        layout_data['ready'] = True
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/installer/setup.py | installer/setup.py | #!/usr/bin/env python
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from setuptools import setup
DESC = """Installer for Apache Bloodhound
Adds the bloodhound_setup cli command.
"""
versions = [
(0, 8, 0),
(0, 9, 0),
]
latest = '.'.join(str(x) for x in versions[-1])
setup(
name="bloodhound_installer",
version=latest,
description=DESC.split('\n', 1)[0],
author="Apache Bloodhound",
license="Apache License v2",
url="https://bloodhound.apache.org/",
requires=['trac', 'BloodhoundMultiProduct'],
packages=['bhsetup'],
entry_points="""
[console_scripts]
bloodhound_setup = bhsetup.bloodhound_setup:run
""",
long_description=DESC,
)
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/installer/bloodhound_setup.py | installer/bloodhound_setup.py | #!/usr/bin/env python
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Initial configuration for Bloodhound"""
from bhsetup import bloodhound_setup
if __name__ == '__main__':
    # Allow running this module directly as the installer entry point.
    bloodhound_setup.run()
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/installer/tests.py | installer/tests.py | # -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Tests for bloodhound_setup.py"""
import unittest
import shutil
import os
from tempfile import mkdtemp, NamedTemporaryFile
from bhsetup.bloodhound_setup import backupfile, BloodhoundSetup
from functools import partial
class BackupfileTest(unittest.TestCase):
"""Unit tests for backupfile routine"""
def setUp(self):
self.tempdir = mkdtemp()
self.original = NamedTemporaryFile(dir=self.tempdir)
def tearDown(self):
self.original.close()
shutil.rmtree(self.tempdir)
def test_backup_creates_new_file(self):
"""Checks that a new file is created"""
orig = self.original.name
new = backupfile(orig)
self.assertNotEqual(orig, new)
self.assert_(os.path.exists(orig))
self.assert_(os.path.exists(new))
def test_multiple_backup_creates_new_files(self):
"""Checks that, for a small finite number of backups, multiple
backups do not overwrite old backups. The limit is effectively 65"""
orig = self.original.name
backups = [backupfile(orig) for i in range(65)]
unique_set = set([orig, ] + backups)
self.assertEqual(len(unique_set), 66)
class BloodhoundSetupTest(unittest.TestCase):
    """Unit tests for BloodhoundSetup helpers."""
    def setUp(self):
        self.tempdir = mkdtemp()
        self.bhs = BloodhoundSetup({})
        self.basedata = {'section': {'option1': 'option1value',
                                     'option2': 'option2value',},}
    def tearDown(self):
        shutil.rmtree(self.tempdir)
    def test_db_str_no_options(self):
        """Checks that giving no options at all has defaults enough to create
        a sqlite db string"""
        self.assertEqual(self.bhs._generate_db_str({}), 'sqlite:' +
                         os.path.join('db', 'bloodhound.db'))
    def test_db_str_provided_db_string(self):
        """Checks that if a dbstring is provided it will be respected above
        other options"""
        dbstr = 'sillyexample'
        options = {'dbstring': dbstr,}
        self.assertEqual(self.bhs._generate_db_str(options), dbstr)
    def test_writeconfig_create_basic_config(self):
        filepath = os.path.join(self.tempdir, 'basic.ini')
        data = [self.basedata]
        self.bhs.writeconfig(filepath, data)
        # assertTrue replaces the deprecated assert_ alias
        self.assertTrue(os.path.exists(filepath))
        # check the file; open() instead of the Python 2-only file()
        with open(filepath) as f:
            fdata = f.read()
        self.assertIn('option1value', fdata)
    def test_writeconfig_update_config(self):
        """Checks that writing a new config with non-overlapping values updates
        an existing file"""
        filepath = os.path.join(self.tempdir, 'basic.ini')
        data = [self.basedata]
        self.bhs.writeconfig(filepath, data)
        newdata = [{'section': {'option3': 'option3value',},}]
        self.bhs.writeconfig(filepath, newdata)
        # check the file
        with open(filepath) as f:
            fdata = f.read()
        self.assertIn('option3value', fdata)
    def test_writeconfig_update_config_overwrite_values(self):
        """Checks that writing a new config with overlapping values overwrites
        them in an existing file"""
        filepath = os.path.join(self.tempdir, 'basic.ini')
        data = [self.basedata]
        self.bhs.writeconfig(filepath, data)
        newdata = [{'section': {'option2': 'newopt2value',},}]
        self.bhs.writeconfig(filepath, newdata)
        with open(filepath) as f:
            fdata = f.read()
        self.assertIn('newopt2value', fdata)
        self.assertNotIn('option2value', fdata)
if __name__ == '__main__':
    # Run the installer test suite directly.
    unittest.main()
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/installer/bhsetup/bloodhound_setup.py | installer/bhsetup/bloodhound_setup.py | #!/usr/bin/env python
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Initial configuration for Bloodhound"""
import os
import pkg_resources
import shutil
import sys
from bhsetup.createdigest import htdigest_create
from getpass import getpass
from optparse import OptionParser
# Trac is a hard requirement: bail out early with installation guidance
# instead of failing deep inside the setup routine.
try:
    from trac.admin.console import TracAdmin
    from trac.config import Configuration
    from trac.util import translation
    from trac.util.translation import _, get_negotiated_locale, has_babel
except ImportError:
    # The exception object was previously bound (`except ImportError, e`)
    # but never used; the py2-only comma syntax is dropped as well.
    print("Requirements must be installed before running "
          "bloodhound_setup.py.\n"
          "You can install them with the following command:\n"
          "    pip install -r requirements.txt\n")
    sys.exit(1)
# Optional database drivers: probed here so setup() can emit a clear
# error message instead of failing deep inside trac-admin.
try:
    import psycopg2
except ImportError:
    psycopg2 = None
try:
    import MySQLdb as mysqldb
except ImportError:
    mysqldb = None
LANG = os.environ.get('LANG')  # preferred locale, used for i18n negotiation
MAXBACKUPNUMBER = 64  # Max attempts to create backup file
# Database backends the installer knows how to build connection strings for
SUPPORTED_DBTYPES = ('sqlite', 'postgres', 'mysql')
DEFAULT_DB_USER = 'bloodhound'
DEFAULT_DB_NAME = 'bloodhound'
DEFAULT_ADMIN_USER = 'admin'
DEFAULT_PROJECT = 'main'
DEFAULT_ENVSDIR = os.path.join('bloodhound', 'environments')
BH_PROJECT_SITE = 'https://issues.apache.org/bloodhound/'
# Default configuration written to every new Bloodhound environment:
# enables the Bloodhound plugins, disables the stock Trac components
# they replace, and applies Bloodhound branding.
BASE_CONFIG = {'components': {'bhtheme.*': 'enabled',
                              'bhdashboard.*': 'enabled',
                              'multiproduct.*': 'enabled',
                              'permredirect.*': 'enabled',
                              'themeengine.api.*': 'enabled',
                              'themeengine.web_ui.*': 'enabled',
                              'bhsearch.*': 'enabled',
                              'bhrelations.*': 'enabled',
                              'trac.ticket.web_ui.ticketmodule': 'disabled',
                              'trac.ticket.report.reportmodule': 'disabled',
                              },
               'header_logo': {'src': '',},
               'mainnav': {'roadmap': 'disabled',
                           'search': 'disabled',
                           'timeline': 'disabled',},
               'metanav': {'about': 'disabled',},
               'theme': {'theme': 'bloodhound',},
               'trac': {'mainnav': ','.join(['dashboard', 'wiki', 'browser',
                                             'tickets', 'newticket', 'timeline',
                                             'roadmap', 'search']),
                        'environment_factory': '',
                        'request_factory': '',},
               'project': {'footer': ('Get involved with '
                                      '<a href="%(site)s">Apache Bloodhound</a>'
                                      % {'site': BH_PROJECT_SITE,}),},
               'labels': {'application_short': 'Bloodhound',
                          'application_full': 'Apache Bloodhound',
                          'footer_left_prefix': '',
                          'footer_left_postfix': '',
                          'footer_right': ''},
               'bhsearch': {'is_default': 'true', 'enable_redirect': 'true'},
               }
# AccountManager plugin configuration; the empty htdigest_file/htdigest_realm
# values are filled in by BloodhoundSetup.setup() for each environment.
ACCOUNTS_CONFIG = {'account-manager': {'account_changes_notify_addresses' : '',
                                       'authentication_url' : '',
                                       'db_htdigest_realm' : '',
                                       'force_passwd_change' :'true',
                                       'hash_method' : 'HtDigestHashMethod',
                                       'htdigest_file' : '',
                                       'htdigest_realm' : '',
                                       'htpasswd_file' : '',
                                       'htpasswd_hash_type' : 'crypt',
                                       'password_store' : 'HtDigestStore',
                                       'persistent_sessions' : 'False',
                                       'refresh_passwd' : 'False',
                                       'user_lock_max_time' : '0',
                                       'verify_email' : 'True',
                                       },
                   'components': {'acct_mgr.admin.*' : 'enabled',
                                  'acct_mgr.api.accountmanager' : 'enabled',
                                  'acct_mgr.guard.accountguard' : 'enabled',
                                  'acct_mgr.htfile.htdigeststore' : 'enabled',
                                  'acct_mgr.macros.*': 'enabled',
                                  'acct_mgr.web_ui.accountmodule' : 'enabled',
                                  'acct_mgr.web_ui.loginmodule' : 'enabled',
                                  'trac.web.auth.loginmodule' : 'disabled',
                                  },
                   }
class BloodhoundSetup(object):
    """Creates a Bloodhound environment"""

    def __init__(self, opts):
        """Normalise *opts* (a dict or an optparse/argparse namespace) into
        an options dict and fill in defaults for required keys."""
        if isinstance(opts, dict):
            options = dict(opts)
        else:
            # NOTE(review): vars() returns the namespace's own __dict__, so
            # unlike the dict branch above this does NOT copy -- later
            # mutations of self.options also mutate the caller's namespace.
            # Confirm this aliasing is intended.
            options = vars(opts)
        self.options = options
        # Fall back to module-level defaults when the caller omitted these.
        if 'project' not in options:
            options['project'] = DEFAULT_PROJECT
        if 'envsdir' not in options:
            options['envsdir'] = DEFAULT_ENVSDIR
        # Flags used when running the functional test suite
        self.apply_bhwiki_upgrades = True

    def _generate_db_str(self, options):
        """Builds an appropriate db string for trac-admin for sqlite and
        postgres options. Also allows for a user to provide their own db
        string to allow database initialisation beyond these."""
        # Connection parameters; keys missing from options default to None.
        dbdata = {'type': options.get('dbtype', 'sqlite'),
                  'user': options.get('dbuser'),
                  'pass': options.get('dbpass'),
                  'host': options.get('dbhost', 'localhost'),
                  'port': options.get('dbport'),
                  'name': options.get('dbname', 'bloodhound'),
                  }
        # An explicit user-supplied dbstring wins over assembled parameters.
        db = options.get('dbstring')
        if db is None:
            if dbdata['type'] in ('postgres', 'mysql') \
                    and dbdata['user'] is not None \
                    and dbdata['pass'] is not None:
                if dbdata['port'] is not None:
                    db = '%(type)s://%(user)s:%(pass)s@%(host)s:%(port)s/%(name)s'
                else: # no port specified = default port
                    db = '%(type)s://%(user)s:%(pass)s@%(host)s/%(name)s'
            else:
                # sqlite (or missing credentials): a path relative to the
                # environment, e.g. 'sqlite:db/bloodhound.db'. The doubled %%
                # defers the 'type' substitution to the interpolation below.
                db = '%%(type)s:%s' % os.path.join('db', '%(name)s.db')
        return db % dbdata

    def setup(self, **kwargs):
        """Do the setup. A kwargs dictionary may be passed to override base
        options, potentially allowing for multiple environment creation.

        Returns True on success, False when a precondition fails (missing db
        driver, inconsistent repository options, initenv failure)."""
        if has_babel:
            import babel
            try:
                locale = get_negotiated_locale([LANG])
                locale = locale or babel.Locale.default()
            except babel.UnknownLocaleError:
                # NOTE(review): if this fires before 'locale' is bound, the
                # activate() call below raises NameError -- confirm.
                pass
            translation.activate(locale)

        # Per-call overrides are layered over the stored options.
        options = dict(self.options)
        options.update(kwargs)

        # Bail out early when the requested backend's driver is missing.
        if psycopg2 is None and options.get('dbtype') == 'postgres':
            print "psycopg2 needs to be installed to initialise a postgresql db"
            return False
        elif mysqldb is None and options.get('dbtype') == 'mysql':
            print "MySQLdb needs to be installed to initialise a mysql db"
            return False

        environments_path = options['envsdir']
        if not os.path.exists(environments_path):
            os.makedirs(environments_path)

        new_env = os.path.join(environments_path, options['project'])
        tracini = os.path.abspath(os.path.join(new_env, 'conf', 'trac.ini'))
        baseini = os.path.abspath(os.path.join(new_env, 'conf', 'base.ini'))
        options['inherit'] = '"' + baseini + '"'
        options['db'] = self._generate_db_str(options)
        # Normalise absent repository options to empty strings so they can
        # be interpolated into the trac-admin initenv command line.
        if 'repo_type' not in options or options['repo_type'] is None:
            options['repo_type'] = ''
        if 'repo_path' not in options or options['repo_path'] is None:
            options['repo_path'] = ''
        # XOR: supplying exactly one of type/path is an error.
        if (len(options['repo_type']) > 0) ^ (len(options['repo_path']) > 0):
            print "Error: Specifying a repository requires both the "\
                  "repository-type and the repository-path options."
            return False

        custom_prefix = 'default_product_prefix'
        if custom_prefix in options and options[custom_prefix]:
            default_product_prefix = options[custom_prefix]
        else:
            default_product_prefix = '@'

        digestfile = os.path.abspath(os.path.join(new_env,
                                                  options['digestfile']))
        realm = options['realm']
        adminuser = options['adminuser']
        adminpass = options['adminpass']

        # create base options:
        accounts_config = dict(ACCOUNTS_CONFIG)
        accounts_config['account-manager']['htdigest_file'] = digestfile
        accounts_config['account-manager']['htdigest_realm'] = realm

        trac = TracAdmin(os.path.abspath(new_env))
        if not trac.env_check():
            # No environment yet: create it via trac-admin initenv.
            try:
                rv = trac.do_initenv('%(project)s %(db)s '
                                     '%(repo_type)s %(repo_path)s '
                                     '--inherit=%(inherit)s '
                                     '--nowiki'
                                     % options)
                if rv == 2:
                    raise SystemExit
            except SystemExit:
                print ("Error: Unable to initialise the environment.")
                return False
        else:
            print ("Warning: Environment already exists at %s." % new_env)

        # Point trac.ini at base.ini, then write the shared base settings
        # plus the account-manager configuration into base.ini.
        self.writeconfig(tracini, [{'inherit': {'file': baseini},},])
        base_config = dict(BASE_CONFIG)
        base_config['trac']['environment_factory'] = \
            'multiproduct.hooks.MultiProductEnvironmentFactory'
        base_config['trac']['request_factory'] = \
            'multiproduct.hooks.ProductRequestFactory'
        if default_product_prefix != '@':
            base_config['multiproduct'] = dict(
                default_product_prefix=default_product_prefix
            )
        self.writeconfig(baseini, [base_config, accounts_config])

        # (Re)create the htdigest file with the admin credentials, backing
        # up any existing file first.
        if os.path.exists(digestfile):
            backupfile(digestfile)
        htdigest_create(digestfile, adminuser, realm, adminpass)

        print "Adding TRAC_ADMIN permissions to the admin user %s" % adminuser
        trac.onecmd('permission add %s TRAC_ADMIN' % adminuser)

        # get fresh TracAdmin instance (original does not know about base.ini)
        bloodhound = TracAdmin(os.path.abspath(new_env))

        # final upgrade
        print "Running upgrades"
        bloodhound.onecmd('upgrade')

        # Load default wiki pages shipped with the dashboard and search
        # plugins, then run the wiki upgrade steps.
        pages = []
        pages.append(pkg_resources.resource_filename('bhdashboard',
                                                     'default-pages'))
        pages.append(pkg_resources.resource_filename('bhsearch',
                                                     'default-pages'))
        bloodhound.onecmd('wiki load %s' % " ".join(pages))
        print "Running wiki upgrades"
        bloodhound.onecmd('wiki upgrade')
        if self.apply_bhwiki_upgrades:
            print "Running wiki Bloodhound upgrades"
            bloodhound.onecmd('wiki bh-upgrade')
        else:
            print "Skipping Bloodhound wiki upgrades"

        # Repeat the wiki provisioning for the default product.
        print "Loading default product wiki"
        bloodhound.onecmd('product admin %s wiki load %s' %
                          (default_product_prefix,
                           " ".join(pages)))
        print "Running default product wiki upgrades"
        bloodhound.onecmd('product admin %s wiki upgrade' %
                          default_product_prefix)
        if self.apply_bhwiki_upgrades:
            print "Running default product Bloodhound wiki upgrades"
            bloodhound.onecmd('product admin %s wiki bh-upgrade' %
                              default_product_prefix)
        else:
            print "Skipping default product Bloodhound wiki upgrades"

        print """
You can now start Bloodhound by running:
tracd --port=8000 %s
And point your browser at http://localhost:8000/%s
""" % (os.path.abspath(new_env), options['project'])
        return True

    def writeconfig(self, filepath, dicts=[]):
        """Writes or updates a config file. A list of dictionaries is used so
        that options for different aspects of the configuration can be kept
        separate while being able to update the same sections. Note that the
        result is order dependent where two dictionaries update the same
        option.
        """
        # NOTE(review): mutable default argument; harmless here because
        # 'dicts' is only iterated, never mutated.
        config = Configuration(filepath)
        file_changed = False
        for data in dicts:
            for section, options in data.iteritems():
                for key, value in options.iteritems():
                    # Only write when the stored value actually differs.
                    if config.get(section, key, None) != value:
                        # This should be expected to generate a false positive
                        # when two dictionaries update the same option
                        file_changed = True
                        config.set(section, key, value)
        if file_changed:
            # Back up the existing file before saving over it.
            if os.path.exists(filepath):
                backupfile(filepath)
            config.save()
def backupfile(filepath):
"""Very basic backup routine"""
print "Warning: Updating %s." % filepath
backuppath = None
if not os.path.exists(filepath + '_bak'):
backuppath = filepath + '_bak'
else:
backuptemplate = filepath + '_bak_%d'
for i in xrange(MAXBACKUPNUMBER):
if not os.path.exists(backuptemplate % i):
backuppath = backuptemplate % i
break
if backuppath is not None:
shutil.copyfile(filepath, backuppath)
print "Backup created at %s." % backuppath
else:
print "No backup created (too many other backups found)"
return backuppath
def handle_options():
"""Parses the command line, with basic prompting for choices where options
are not specified."""
parser = OptionParser()
# Base Trac Options
parser.add_option('--project', dest='project',
help='Set the top project name', default='')
parser.add_option('--source_directory', dest='sourcedir',
help='Specify root source code directory',
default=os.path.normpath(os.path.join(os.getcwd(), '../'))),
parser.add_option('--environments_directory', dest='envsdir',
help='Set the directory to contain environments',
default='')
parser.add_option('-d', '--database-type', dest='dbtype',
help="Specify as either 'sqlite', 'postgres' or 'mysql'",
default='')
parser.add_option('--database-string', dest='dbstring',
help=('Advanced: provide a custom database string, '
'overriding other database options'),
default=None)
parser.add_option('--database-name', dest='dbname',
help='Specify the database name',
default='bloodhound')
parser.add_option('-u', '--user', dest='dbuser',
help='Specify the db user (required for postgres and mysql)',
default='')
parser.add_option('-p', '--password', dest='dbpass',
help='Specify the db password (required for postgres and mysql)')
parser.add_option('--database-host', dest='dbhost',
help='Specify the database host (optional for postgres and mysql)',
default='localhost')
parser.add_option('--database-port', dest='dbport',
help='Specify the database port (optional for postgres and mysql)',
default='5432')
# Account Manager Options
parser.add_option('--admin-password', dest='adminpass',
help='create an admin user in an htdigest file')
parser.add_option('--digest-realm', dest='realm', default='bloodhound',
help='authentication realm for htdigest file')
parser.add_option('--admin-user', dest='adminuser', default='',
help='admin user name for htdigest file')
parser.add_option('--digest-file', dest='digestfile',
default='bloodhound.htdigest',
help='filename for the htdigest file')
# Repository Options
parser.add_option('--repository-type', dest='repo_type',
help='specify the repository type - ')
parser.add_option('--repository-path', dest='repo_path',
help='specify the repository type')
# Multiproduct options
parser.add_option('--default-product-prefix', dest='default_product_prefix',
help='Specify prefix for default product (defaults to @')
(options, args) = parser.parse_args()
if args:
print "Unprocessed options/arguments: ", args
def ask_question(question, default=None):
"""Basic question asking functionality"""
if default:
answer = raw_input(question % default)
else:
answer = raw_input(question)
return answer if answer else default
def ask_password(user):
"""Asks for a password to be provided for setting purposes"""
attempts = 3
for attempt in range(attempts):
if attempt > 0:
print "Passwords empty or did not match. Please try again",
print "(attempt %d/%d)""" % (attempt+1, attempts)
password1 = getpass('Enter a new password for "%s": ' % user)
password2 = getpass('Please reenter the password: ')
if password1 and password1 == password2:
return password1
print "Passwords did not match. Quiting."
sys.exit(1)
if options.dbtype.lower() not in SUPPORTED_DBTYPES:
answer = ask_question("""
This installer is able to install Apache Bloodhound with either SQLite,
PostgreSQL or MySQL databases. SQLite is an easier option for installing
Bloodhound as SQLite support is built into Python and requires no special
permissions to run. However, PostgreSQL and MySQL are generally expected to
be more robust for production use.
What type of database do you want to instant to (%s)?
[%%s]: """ % '/'.join(SUPPORTED_DBTYPES), default='sqlite')
answer = answer.lower()
if answer in SUPPORTED_DBTYPES:
options.dbtype = answer
else:
print "Unrecognized dbtype \"%s\". Quiting." % answer
sys.exit(1)
else:
options.dbtype = options.dbtype.lower()
if options.dbtype in ('postgres','mysql'):
if not options.dbuser:
options.dbuser = ask_question("""
For PostgreSQL/MySQL you need to have PostgreSQL/MySQL installed and you need
to have created a database user to connect to the database with. Setting this
up may require admin access rights to the server.
DB user name [%s]: """, DEFAULT_DB_USER)
if not options.dbpass:
options.dbpass = ask_password(options.dbuser)
if not options.dbname:
options.dbname = ask_question("""
For PostgreSQL/MySQL setup, you need to specify a database that you have
created for Bloodhound to use. This installer currently assumes that this
database will be empty. DB name [%s]: """, DEFAULT_DB_NAME)
if not options.adminuser:
options.adminuser = ask_question("""
Please supply a username for the admin user [%s]: """, DEFAULT_ADMIN_USER)
if not options.adminpass:
options.adminpass = ask_password(options.adminuser)
if not options.project:
options.project = ask_question("""
For the installation process, you can specify the top project name.
This installer currently assumes that project name is 'main'.
Project name [%s]: """, DEFAULT_PROJECT)
if not options.envsdir:
options.envsdir = ask_question("""
For the installation process, you can specify the directory to contain environments.
This installer currently assumes that environments directory is './bloodhound/environments'.
Environments directory [%s]: """, DEFAULT_ENVSDIR)
return options
def run():
    """Collect command-line options and build the Bloodhound environment."""
    BloodhoundSetup(handle_options()).setup()
# Allow invoking this installer module directly as a script.
if __name__ == '__main__':
    run()
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/installer/bhsetup/createdigest.py | installer/bhsetup/createdigest.py | #!/usr/bin/env python
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Simple htdigest file creation.
Saves the need for an installed htdigest program"""
import os
import shutil
import sys
from tempfile import mkstemp
from optparse import OptionParser
from hashlib import md5
from getpass import getpass
DEFAULT_USER = 'admin'
DEFAULT_REALM = 'default'
DEFAULT_FILE = 'default.htdigest'
def htdigest_create(filename, user, realm, password, path=''):
    """Create an htdigest file by adding the user to the file

    Just in case an htdigest file already exists, this function will copy the
    data line by line into a temporary file, commenting out any lines that match
    the user and realm data. The new entry is then appended before the temporary
    copy is moved back to the original location"""
    # htdigest line format: user:realm:md5(user:realm:password)
    user_realm = ':'.join((user, realm))
    digest = md5(':'.join((user_realm, password))).hexdigest()
    data = ':'.join((user_realm, digest)) + '\n'
    filepath = os.path.join(path, filename)
    # Build the new contents in a temp file so a partially-written digest
    # never replaces the original.
    # NOTE(review): the fd from mkstemp is held open while the same path is
    # reopened by name below; works, but the fd is redundant until os.close.
    temp, tempfilepath = mkstemp()
    with open(tempfilepath,'w') as tempdigestfile:
        if os.path.exists(filepath):
            # Copy existing entries, commenting out (not deleting) any stale
            # line for this exact user/realm pair.
            with open(filepath) as origdigestfile:
                for line in origdigestfile:
                    if line.strip().startswith(user_realm + ':'):
                        tempdigestfile.write('#' + line)
                    else:
                        tempdigestfile.write(line)
        # Append the fresh entry last.
        tempdigestfile.write(data)
    os.close(temp)
    # Swap the updated copy into place.
    if os.path.exists(filepath):
        os.remove(filepath)
    shutil.move(tempfilepath, filepath)
def main():
    """Parse arguments and run the function"""
    parser = OptionParser()
    parser.add_option('-f', '--digestfile', dest='digestfile',
                      help='htdigest filename')
    parser.add_option('-r', '--realm', dest='realm',
                      help='authentication realm')
    parser.add_option('-u', '--user', dest='user',
                      help='user name')
    parser.add_option('-p', '--password', dest='password',
                      help='password for USER')
    (opts, args) = parser.parse_args()
    # Prompt interactively for anything not given on the command line.
    if not opts.digestfile:
        input_file = raw_input('Enter the file [%s]: ' % DEFAULT_FILE)
        opts.digestfile = input_file if input_file else DEFAULT_FILE
    path, filename = os.path.split(opts.digestfile)
    if not opts.user:
        input_user = raw_input('Enter the user [%s]: ' % DEFAULT_USER)
        opts.user = input_user if input_user else DEFAULT_USER
    if not opts.password:
        # Up to three attempts at a non-empty, matching password pair.
        attempts = 3
        for attempt in range(attempts):
            if attempt > 0:
                print "Passwords empty or did not match. Please try again",
                # NOTE(review): the trailing "" here is a no-op adjacent
                # string-literal concatenation left over from an edit.
                print "(attempt %d/%d)""" % (attempt+1, attempts)
            password1 = getpass('Enter a new password for "%s": ' % opts.user)
            password2 = getpass('Please reenter the password: ')
            if password1 and password1 == password2:
                opts.password = password1
                break
        if not opts.password:
            print "Passwords did not match. Quitting."
            sys.exit(1)
    if not opts.realm:
        input_realm = raw_input('Enter the auth realm [%s]: ' % DEFAULT_REALM)
        opts.realm = input_realm if input_realm else DEFAULT_REALM
    htdigest_create(filename, opts.user, opts.realm, opts.password, path)
# Script entry point.
if __name__ == '__main__':
    main()
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/installer/bhsetup/__init__.py | installer/bhsetup/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
drivendataorg/zamba | https://github.com/drivendataorg/zamba/blob/ce5d28ead22557e412d870780b0dbe3884bbd5cc/zamba/metrics.py | zamba/metrics.py | from typing import Generator, List, Optional, Tuple
import numpy as np
from sklearn.metrics import accuracy_score, f1_score, precision_score, recall_score
def compute_species_specific_metrics(
    y_true: np.ndarray,
    y_pred: np.ndarray,
    labels: Optional[List[str]] = None,
) -> Generator[Tuple[str, int, float], None, None]:
    """Yield per-species accuracy, F1, precision, and recall.

    Args:
        y_true (np.ndarray): An array with shape (samples, species) where each value
            indicates the presence of a species in a sample.
        y_pred (np.ndarray): An array with shape (samples, species) where each value
            indicates the predicted presence of a species in a sample.
        labels (list of str, optional): Labels for the species columns; defaults to
            the integer column indices when not provided.

    Yields:
        str, int, float: The metric name, species label, and metric value.

    Raises:
        ValueError: If the number of labels does not match the number of
            columns in y_true.
    """
    if labels is None:
        labels = range(y_true.shape[1])
    elif len(labels) != y_true.shape[1]:
        raise ValueError(
            f"The number of labels ({len(labels)}) must match the number of columns in y_true ({y_true.shape[1]})."
        )

    # Metric table preserves the original per-label yield order.
    metric_fns = (
        ("accuracy", lambda t, p: accuracy_score(t, p)),
        ("f1", lambda t, p: f1_score(t, p, zero_division=0)),
        ("precision", lambda t, p: precision_score(t, p, zero_division=0)),
        ("recall", lambda t, p: recall_score(t, p, zero_division=0)),
    )
    for column, label in enumerate(labels):
        truth = y_true[:, column]
        preds = y_pred[:, column]
        for metric_name, metric_fn in metric_fns:
            yield metric_name, label, metric_fn(truth, preds)
| python | MIT | ce5d28ead22557e412d870780b0dbe3884bbd5cc | 2026-01-05T07:12:27.321318Z | false |
drivendataorg/zamba | https://github.com/drivendataorg/zamba/blob/ce5d28ead22557e412d870780b0dbe3884bbd5cc/zamba/cli.py | zamba/cli.py | from pathlib import Path
from typing import Optional
import warnings
from loguru import logger
from pydantic.error_wrappers import ValidationError
from tqdm import tqdm
import typer
import yaml
from zamba.data.video import VideoLoaderConfig
from zamba.image_cli import app as image_app
from zamba.models.config import (
ModelConfig,
ModelEnum,
PredictConfig,
TrainConfig,
)
from zamba import MODELS_DIRECTORY
from zamba.models.densepose import DensePoseConfig, DensePoseOutputEnum
from zamba.models.depth_estimation import DepthEstimationConfig
from zamba.models.model_manager import ModelManager
from zamba.models.utils import RegionEnum
from zamba.utils_cli import app as utils_app
from zamba.version import __version__
# make logger work with tqdm
# Replace loguru's default sink with one that writes via tqdm so log lines
# don't corrupt active progress bars.
logger.remove()
logger.add(lambda msg: tqdm.write(msg, end=""), colorize=True)

# Top-level typer application; sub-apps group image- and utility-commands.
app = typer.Typer(pretty_exceptions_show_locals=False)
app.add_typer(image_app, name="image", help="Tools for working with images instead of videos.")
app.add_typer(utils_app, name="utils", help="Utilities")
@app.command()
def train(
    data_dir: Path = typer.Option(None, exists=True, help="Path to folder containing videos."),
    labels: Path = typer.Option(None, exists=True, help="Path to csv containing video labels."),
    model: ModelEnum = typer.Option(
        "time_distributed",
        help="Model to train. Model will be superseded by checkpoint if provided.",
    ),
    checkpoint: Path = typer.Option(
        None,
        exists=True,
        help="Model checkpoint path to use for training. If provided, model is not required.",
    ),
    config: Path = typer.Option(
        None,
        exists=True,
        help="Specify options using yaml configuration file instead of through command line options.",
    ),
    batch_size: int = typer.Option(None, help="Batch size to use for training."),
    gpus: int = typer.Option(
        None,
        # FIX: corrected "specifiied" typo in user-facing help text.
        help="Number of GPUs to use for training. If not specified, will use all GPUs found on machine.",
    ),
    dry_run: bool = typer.Option(
        None,
        help="Runs one batch of train and validation to check for bugs.",
    ),
    save_dir: Path = typer.Option(
        None,
        help="An optional directory in which to save the model checkpoint and configuration file. If not specified, will save to a `version_n` folder in your working directory.",
    ),
    num_workers: int = typer.Option(
        None,
        help="Number of subprocesses to use for data loading.",
    ),
    weight_download_region: RegionEnum = typer.Option(
        None, help="Server region for downloading weights."
    ),
    skip_load_validation: bool = typer.Option(
        None,
        help="Skip check that verifies all videos can be loaded prior to training. Only use if you're very confident all your videos can be loaded.",
    ),
    yes: bool = typer.Option(
        False,
        "--yes",
        "-y",
        help="Skip confirmation of configuration and proceed right to training.",
    ),
):
    """Train a model on your labeled data.

    If an argument is specified in both the command line and in a yaml file, the command line input will take precedence.
    """
    # Load the user's yaml config if given; otherwise fall back to the
    # default config bundled with the chosen model.
    if config is not None:
        with config.open() as f:
            config_dict = yaml.safe_load(f)
        config_file = config
    else:
        with (MODELS_DIRECTORY / f"{model.value}/config.yaml").open() as f:
            config_dict = yaml.safe_load(f)
        config_file = None

    if "video_loader_config" in config_dict.keys():
        video_loader_config = VideoLoaderConfig(**config_dict["video_loader_config"])
    else:
        video_loader_config = None

    train_dict = config_dict["train_config"]

    # override if any command line arguments are passed
    if data_dir is not None:
        train_dict["data_dir"] = data_dir
    if labels is not None:
        train_dict["labels"] = labels
    # NOTE(review): this guard means an explicit `--model time_distributed`
    # cannot override a different model named in the config file -- confirm.
    if model != "time_distributed":
        train_dict["model_name"] = model
    if checkpoint is not None:
        train_dict["checkpoint"] = checkpoint
    if batch_size is not None:
        train_dict["batch_size"] = batch_size
    if gpus is not None:
        train_dict["gpus"] = gpus
    if dry_run is not None:
        train_dict["dry_run"] = dry_run
    if save_dir is not None:
        train_dict["save_dir"] = save_dir
    if num_workers is not None:
        train_dict["num_workers"] = num_workers
    if weight_download_region is not None:
        train_dict["weight_download_region"] = weight_download_region
    if skip_load_validation is not None:
        train_dict["skip_load_validation"] = skip_load_validation

    # Validate the merged options; pydantic raises on bad combinations.
    try:
        manager = ModelManager(
            ModelConfig(
                video_loader_config=video_loader_config,
                train_config=TrainConfig(**train_dict),
            )
        )
    except ValidationError as ex:
        logger.error("Invalid configuration.")
        # NOTE(review): typer.Exit expects an int exit code; passing the
        # exception object relies on sys.exit printing it and exiting 1.
        raise typer.Exit(ex)

    config = manager.config

    # get species to confirm
    spacer = "\n\t- "
    species = spacer + spacer.join(
        sorted(
            [
                c.split("species_", 1)[1]
                for c in config.train_config.labels.filter(regex="species").columns
            ]
        )
    )

    msg = f"""The following configuration will be used for training:
Config file: {config_file}
Data directory: {data_dir if data_dir is not None else config_dict["train_config"].get("data_dir")}
Labels csv: {labels if labels is not None else config_dict["train_config"].get("labels")}
Species: {species}
Model name: {config.train_config.model_name}
Checkpoint: {checkpoint if checkpoint is not None else config_dict["train_config"].get("checkpoint")}
Batch size: {config.train_config.batch_size}
Number of workers: {config.train_config.num_workers}
GPUs: {config.train_config.gpus}
Dry run: {config.train_config.dry_run}
Save directory: {config.train_config.save_dir}
Weight download region: {config.train_config.weight_download_region}
"""

    if yes:
        typer.echo(f"{msg}\n\nSkipping confirmation and proceeding to train.")
    else:
        yes = typer.confirm(
            f"{msg}\n\nIs this correct?",
            abort=False,
            default=True,
        )

    if yes:
        # kick off training
        manager.train()
@app.command()
def predict(
    data_dir: Path = typer.Option(None, exists=True, help="Path to folder containing videos."),
    filepaths: Path = typer.Option(
        None, exists=True, help="Path to csv containing `filepath` column with videos."
    ),
    model: ModelEnum = typer.Option(
        "time_distributed",
        help="Model to use for inference. Model will be superseded by checkpoint if provided.",
    ),
    checkpoint: Path = typer.Option(
        None,
        exists=True,
        help="Model checkpoint path to use for inference. If provided, model is not required.",
    ),
    gpus: int = typer.Option(
        None,
        # FIX: corrected "specifiied" typo in user-facing help text.
        help="Number of GPUs to use for inference. If not specified, will use all GPUs found on machine.",
    ),
    # FIX: help text previously said "training" (copy-paste from `train`).
    batch_size: int = typer.Option(None, help="Batch size to use for inference."),
    save: bool = typer.Option(
        None,
        help="Whether to save out predictions. If you want to specify the output directory, use save_dir instead.",
    ),
    save_dir: Path = typer.Option(
        None,
        help="An optional directory in which to save the model predictions and configuration yaml. "
        "Defaults to the current working directory if save is True.",
    ),
    dry_run: bool = typer.Option(None, help="Runs one batch of inference to check for bugs."),
    config: Path = typer.Option(
        None,
        exists=True,
        help="Specify options using yaml configuration file instead of through command line options.",
    ),
    proba_threshold: float = typer.Option(
        None,
        help="Probability threshold for classification between 0 and 1. If specified binary predictions "
        "are returned with 1 being greater than the threshold, 0 being less than or equal to. If not "
        "specified, probabilities between 0 and 1 are returned.",
    ),
    output_class_names: bool = typer.Option(
        None,
        help="If True, we just return a video and the name of the most likely class. If False, "
        "we return a probability or indicator (depending on --proba_threshold) for every "
        "possible class.",
    ),
    num_workers: int = typer.Option(
        None,
        help="Number of subprocesses to use for data loading.",
    ),
    weight_download_region: RegionEnum = typer.Option(
        None, help="Server region for downloading weights."
    ),
    skip_load_validation: bool = typer.Option(
        None,
        help="Skip check that verifies all videos can be loaded prior to inference. Only use if you're very confident all your videos can be loaded.",
    ),
    overwrite: bool = typer.Option(
        None, "--overwrite", "-o", help="Overwrite outputs in the save directory if they exist."
    ),
    yes: bool = typer.Option(
        False,
        "--yes",
        "-y",
        help="Skip confirmation of configuration and proceed right to prediction.",
    ),
):
    """Identify species in a video.

    This is a command line interface for prediction on camera trap footage. Given a path to camera
    trap footage, the predict function uses a deep learning model to predict the presence or absence of
    a variety of species of common interest to wildlife researchers working with camera trap data.

    If an argument is specified in both the command line and in a yaml file, the command line input will take precedence.
    """
    # Load the user's yaml config if given; otherwise fall back to the
    # default config bundled with the chosen model.
    if config is not None:
        with config.open() as f:
            config_dict = yaml.safe_load(f)
        config_file = config
    else:
        with (MODELS_DIRECTORY / f"{model.value}/config.yaml").open() as f:
            config_dict = yaml.safe_load(f)
        config_file = None

    if "video_loader_config" in config_dict.keys():
        video_loader_config = VideoLoaderConfig(**config_dict["video_loader_config"])
    else:
        video_loader_config = None

    predict_dict = config_dict["predict_config"]

    # override if any command line arguments are passed
    if data_dir is not None:
        predict_dict["data_dir"] = data_dir
    if filepaths is not None:
        predict_dict["filepaths"] = filepaths
    # NOTE(review): an explicit `--model time_distributed` cannot override a
    # different model named in the config file -- same caveat as `train`.
    if model != "time_distributed":
        predict_dict["model_name"] = model
    if checkpoint is not None:
        predict_dict["checkpoint"] = checkpoint
    if batch_size is not None:
        predict_dict["batch_size"] = batch_size
    if gpus is not None:
        predict_dict["gpus"] = gpus
    if dry_run is not None:
        predict_dict["dry_run"] = dry_run
    if save is not None:
        predict_dict["save"] = save
    # save_dir takes precedence over save
    if save_dir is not None:
        predict_dict["save_dir"] = save_dir
    if proba_threshold is not None:
        predict_dict["proba_threshold"] = proba_threshold
    if output_class_names is not None:
        predict_dict["output_class_names"] = output_class_names
    if num_workers is not None:
        predict_dict["num_workers"] = num_workers
    if weight_download_region is not None:
        predict_dict["weight_download_region"] = weight_download_region
    if skip_load_validation is not None:
        predict_dict["skip_load_validation"] = skip_load_validation
    if overwrite is not None:
        predict_dict["overwrite"] = overwrite

    # Validate the merged options; pydantic raises on bad combinations.
    try:
        manager = ModelManager(
            ModelConfig(
                video_loader_config=video_loader_config,
                predict_config=PredictConfig(**predict_dict),
            )
        )
    except ValidationError as ex:
        logger.error("Invalid configuration.")
        # NOTE(review): typer.Exit expects an int exit code; passing the
        # exception object relies on sys.exit printing it and exiting 1.
        raise typer.Exit(ex)

    config = manager.config

    msg = f"""The following configuration will be used for inference:
Config file: {config_file}
Data directory: {data_dir if data_dir is not None else config_dict["predict_config"].get("data_dir")}
Filepath csv: {filepaths if filepaths is not None else config_dict["predict_config"].get("filepaths")}
Model: {config.predict_config.model_name}
Checkpoint: {checkpoint if checkpoint is not None else config_dict["predict_config"].get("checkpoint")}
Batch size: {config.predict_config.batch_size}
Number of workers: {config.predict_config.num_workers}
GPUs: {config.predict_config.gpus}
Dry run: {config.predict_config.dry_run}
Save directory: {config.predict_config.save_dir}
Proba threshold: {config.predict_config.proba_threshold}
Output class names: {config.predict_config.output_class_names}
Weight download region: {config.predict_config.weight_download_region}
"""

    if yes:
        typer.echo(f"{msg}\n\nSkipping confirmation and proceeding to prediction.")
    else:
        yes = typer.confirm(
            f"{msg}\n\nIs this correct?",
            abort=False,
            default=True,
        )

    if yes:
        # kick off prediction
        manager.predict()
def version_callback(version: bool):
    """Print zamba package version and exit."""
    if not version:
        return
    typer.echo(__version__)
    raise typer.Exit()
# Root callback: defines app-wide options. is_eager makes --version run
# (and exit) before any subcommand validation.
@app.callback()
def main(
    version: Optional[bool] = typer.Option(
        None,
        "--version",
        callback=version_callback,
        is_eager=True,
        help="Show zamba version and exit.",
    ),
):
    """Zamba is a tool built in Python to automatically identify the species seen
    in camera trap videos from sites in Africa and Europe. Visit
    https://zamba.drivendata.org/docs for more in-depth documentation."""
@app.command()
def densepose(
    data_dir: Path = typer.Option(
        None, exists=True, help="Path to video or image file or folder containing images/videos."
    ),
    filepaths: Path = typer.Option(
        None, exists=True, help="Path to csv containing `filepath` column with videos."
    ),
    save_dir: Path = typer.Option(
        None,
        help="An optional directory for saving the output. Defaults to the current working directory.",
    ),
    config: Path = typer.Option(
        None,
        exists=True,
        help="Specify options using yaml configuration file instead of through command line options.",
    ),
    fps: float = typer.Option(
        1.0, help="Number of frames per second to process. Defaults to 1.0 (1 frame per second)."
    ),
    output_type: DensePoseOutputEnum = typer.Option(
        "chimp_anatomy",
        help="If 'chimp_anatomy' will apply anatomy model from densepose to the rendering and create a CSV with "
        "the anatomy visible in each frame. If 'segmentation', will just output the segmented area where an animal "
        "is identified, which works for more species than chimpanzees.",
    ),
    render_output: bool = typer.Option(
        False,
        help="If True, generate an output image or video with either the segmentation or anatomy rendered "
        "depending on the `output_type` that is chosen.",
    ),
    weight_download_region: RegionEnum = typer.Option(
        None, help="Server region for downloading weights."
    ),
    cache_dir: Path = typer.Option(
        None,
        exists=False,
        help="Path to directory for model weights. Alternatively, specify with environment variable `MODEL_CACHE_DIR`. If not specified, user's cache directory is used.",
    ),
    yes: bool = typer.Option(
        False,
        "--yes",
        "-y",
        help="Skip confirmation of configuration and proceed right to prediction.",
    ),
):
    """Run densepose algorithm on videos.

    If an argument is specified in both the command line and in a yaml file, the command line input will take precedence.
    """
    # Unlike train/predict there is no bundled default config; start from an
    # empty dict when no yaml file is supplied.
    if config is not None:
        with config.open() as f:
            config_dict = yaml.safe_load(f)
        config_file = config
    else:
        config_dict = {}
        config_file = None

    # Ensure a video_loader_config section exists; fps has a non-None
    # default (1.0), so the CLI value always overrides the yaml here.
    if "video_loader_config" not in config_dict.keys():
        config_dict["video_loader_config"] = dict()

    if fps is not None:
        config_dict["video_loader_config"]["fps"] = fps

    # NOTE(review): predict_dict aliases config_dict (same object), so the
    # overrides below also mutate config_dict, which is later read for the
    # confirmation message.
    predict_dict = config_dict

    # override if any command line arguments are passed
    if data_dir is not None:
        predict_dict["data_dir"] = data_dir
    if filepaths is not None:
        predict_dict["filepaths"] = filepaths
    if save_dir is not None:
        predict_dict["save_dir"] = save_dir
    if weight_download_region is not None:
        predict_dict["weight_download_region"] = weight_download_region
    if cache_dir is not None:
        predict_dict["cache_dir"] = cache_dir
    if output_type is not None:
        predict_dict["output_type"] = output_type
    if render_output is not None:
        predict_dict["render_output"] = render_output

    # Validate the merged options; pydantic raises on bad combinations.
    try:
        densepose_config = DensePoseConfig(**predict_dict)
    except ValidationError as ex:
        logger.error("Invalid configuration.")
        # NOTE(review): typer.Exit expects an int exit code; passing the
        # exception object relies on sys.exit printing it and exiting 1.
        raise typer.Exit(ex)

    msg = f"""The following configuration will be used for inference:
Config file: {config_file}
Output type: {densepose_config.output_type}
Render output: {densepose_config.render_output}
Data directory: {data_dir if data_dir is not None else config_dict.get("data_dir")}
Filepath csv: {filepaths if filepaths is not None else config_dict.get("filepaths")}
Weight download region: {densepose_config.weight_download_region}
Cache directory: {densepose_config.cache_dir}
"""

    if yes:
        typer.echo(f"{msg}\n\nSkipping confirmation and proceeding to prediction.")
    else:
        yes = typer.confirm(
            f"{msg}\n\nIs this correct?",
            abort=False,
            default=True,
        )

    if yes:
        # kick off prediction
        # Suppress noisy warnings emitted by the densepose/detectron stack.
        with warnings.catch_warnings():
            warnings.filterwarnings("ignore")
            densepose_config.run_model()
@app.command()
def depth(
    filepaths: Path = typer.Option(
        None, exists=True, help="Path to csv containing `filepath` column with videos."
    ),
    data_dir: Path = typer.Option(None, exists=True, help="Path to folder containing videos."),
    save_to: Path = typer.Option(
        None,
        help="An optional directory or csv path for saving the output. Defaults to `depth_predictions.csv` in the working directory.",
    ),
    overwrite: bool = typer.Option(
        None, "--overwrite", "-o", help="Overwrite output csv if it exists."
    ),
    batch_size: int = typer.Option(None, help="Batch size to use for inference."),
    num_workers: int = typer.Option(
        None,
        help="Number of subprocesses to use for data loading.",
    ),
    gpus: int = typer.Option(
        None,
        # fix typo in user-facing help text ("specifiied")
        help="Number of GPUs to use for inference. If not specified, will use all GPUs found on machine.",
    ),
    model_cache_dir: Path = typer.Option(
        None,
        exists=False,
        help="Path to directory for downloading model weights. Alternatively, specify with environment variable `MODEL_CACHE_DIR`. If not specified, user's cache directory is used.",
    ),
    weight_download_region: RegionEnum = typer.Option(
        None, help="Server region for downloading weights."
    ),
    yes: bool = typer.Option(
        False,
        "--yes",
        "-y",
        help="Skip confirmation of configuration and proceed right to prediction.",
    ),
):
    """Estimate animal distance at each second in the video.

    Options left unset (None) are omitted from the config so that the
    DepthEstimationConfig defaults apply.
    """
    predict_dict = dict(filepaths=filepaths)

    # override if any command line arguments are passed
    if data_dir is not None:
        predict_dict["data_dir"] = data_dir
    if save_to is not None:
        predict_dict["save_to"] = save_to
    if overwrite is not None:
        predict_dict["overwrite"] = overwrite
    if batch_size is not None:
        predict_dict["batch_size"] = batch_size
    if num_workers is not None:
        predict_dict["num_workers"] = num_workers
    if gpus is not None:
        predict_dict["gpus"] = gpus
    if model_cache_dir is not None:
        predict_dict["model_cache_dir"] = model_cache_dir
    if weight_download_region is not None:
        predict_dict["weight_download_region"] = weight_download_region

    # validate eagerly so the user sees config errors before any work starts
    try:
        depth_config = DepthEstimationConfig(**predict_dict)
    except ValidationError as ex:
        logger.error(f"Invalid configuration: {ex}")
        raise typer.Exit(1)

    msg = f"""The following configuration will be used for inference:

    Filepath csv: {predict_dict["filepaths"]}
    Data directory: {depth_config.data_dir}
    Save to: {depth_config.save_to}
    Overwrite: {depth_config.overwrite}
    Batch size: {depth_config.batch_size}
    Number of workers: {depth_config.num_workers}
    GPUs: {depth_config.gpus}
    Model cache: {depth_config.model_cache_dir}
    Weight download region: {depth_config.weight_download_region}
    """

    if yes:
        typer.echo(f"{msg}\n\nSkipping confirmation and proceeding to prediction.")
    else:
        yes = typer.confirm(
            f"{msg}\n\nIs this correct?",
            abort=False,
            default=True,
        )

    if yes:
        # kick off prediction
        depth_config.run_model()
# allow running this CLI module directly as a script
if __name__ == "__main__":
    app()
| python | MIT | ce5d28ead22557e412d870780b0dbe3884bbd5cc | 2026-01-05T07:12:27.321318Z | false |
drivendataorg/zamba | https://github.com/drivendataorg/zamba/blob/ce5d28ead22557e412d870780b0dbe3884bbd5cc/zamba/exceptions.py | zamba/exceptions.py | from typing import Union
class ZambaFfmpegException(Exception):
    """Raised when ffmpeg fails while loading a video.

    Args:
        stderr (bytes or str): Captured ffmpeg stderr output; bytes are
            decoded as UTF-8 with undecodable bytes replaced.
    """

    def __init__(self, stderr: Union[bytes, str]):
        message = stderr.decode("utf8", errors="replace") if isinstance(stderr, bytes) else stderr
        # fix typo in user-facing message: "failer" -> "failed"
        super().__init__(f"Video loading failed with error:\n{message}")
| python | MIT | ce5d28ead22557e412d870780b0dbe3884bbd5cc | 2026-01-05T07:12:27.321318Z | false |
drivendataorg/zamba | https://github.com/drivendataorg/zamba/blob/ce5d28ead22557e412d870780b0dbe3884bbd5cc/zamba/version.py | zamba/version.py | import sys
# use the stdlib importlib.metadata on Python 3.8+, falling back to the
# importlib_metadata backport package on older interpreters
if sys.version_info[:2] >= (3, 8):
    import importlib.metadata as importlib_metadata
else:
    import importlib_metadata

# single source of truth: read the version of the installed zamba distribution
__version__ = importlib_metadata.version("zamba")
| python | MIT | ce5d28ead22557e412d870780b0dbe3884bbd5cc | 2026-01-05T07:12:27.321318Z | false |
drivendataorg/zamba | https://github.com/drivendataorg/zamba/blob/ce5d28ead22557e412d870780b0dbe3884bbd5cc/zamba/image_cli.py | zamba/image_cli.py | from collections.abc import Callable
import os
from pathlib import Path
from click.core import ParameterSource
from loguru import logger
from pydantic.error_wrappers import ValidationError
import typer
import yaml
from zamba.images.bbox import BboxInputFormat
from zamba.images.config import (
ImageClassificationPredictConfig,
ImageClassificationTrainingConfig,
ImageModelEnum,
ResultsFormat,
)
from zamba.images.manager import ZambaImagesManager
from zamba.models.utils import RegionEnum
app = typer.Typer(pretty_exceptions_show_locals=False)
def get_cli_args(ctx: typer.Context, source=ParameterSource.COMMANDLINE) -> dict:
    """Collect the subset of ``ctx.params`` whose values came from *source*.

    With the default source this returns only the options the user explicitly
    typed on the command line, letting callers give CLI input precedence over
    environment, config-file, and default values.
    """
    return {
        name: value
        for name, value in ctx.params.items()
        # keep a parameter only when click reports it originated from `source`
        if ctx.get_parameter_source(name) == source
    }
def consolidate_options(
    ctx: typer.Context,
    config_file: Path | None,
    config_key,
    mutators: dict[str, Callable] | None = None,
    to_drop: list[str] | None = None,
) -> dict:
    """Bring together options from different sources into a single dictionary using clear precedence rules.

    Precedence, lowest to highest: CLI defaults, environment variables, the
    ``config_key`` section of the YAML config file, explicitly passed CLI args.

    Args:
        ctx: Typer/click context holding parameter values and their sources.
        config_file: Optional YAML file whose ``config_key`` section is merged in.
        config_key: Top-level key in the YAML file (e.g. "predict_config").
        mutators: Maps a CLI arg name to a callable returning a
            ``(new_key, new_value)`` pair, used to rename/transform CLI args
            before merging. Defaults to no mutations.
        to_drop: Keys removed from the result because they are CLI-only and
            not part of the config object. Defaults to none.

    Returns:
        dict: Consolidated options ready to construct a config object.
    """
    # avoid mutable default arguments (shared across calls)
    mutators = {} if mutators is None else mutators
    to_drop = [] if to_drop is None else to_drop

    # Load the cli defaults first
    options = get_cli_args(ctx, ParameterSource.DEFAULT)

    # Drop anything that defaults to None in favor of the pydantic defaults
    for key in [key for key, value in options.items() if value is None]:
        del options[key]

    # Then load environment variables
    options.update(get_cli_args(ctx, ParameterSource.ENVIRONMENT))

    # Then load the configuration file if provided
    if config_file is not None:
        with config_file.open() as f:
            config_dict = yaml.safe_load(f)
        options.update(config_dict[config_key])

    # Then add any CLI arguments that were explicitly passed
    cli_args = get_cli_args(ctx)
    for mutator_name, mutator in mutators.items():
        if mutator_name in cli_args:
            # the original key stays in place; callers list it in `to_drop`
            new_key, value = mutator(cli_args[mutator_name])
            cli_args[new_key] = value

    options.update(cli_args)

    # Drop anything that's not for the config object
    for key in to_drop:
        options.pop(key, None)

    return options
@app.command("predict")
def predict(
ctx: typer.Context,
data_dir: Path = typer.Option(None, exists=True, help="Path to folder containing images."),
filepaths: Path = typer.Option(
None,
exists=True,
help="Path to csv containing `filepath` column with image paths.",
),
model: ImageModelEnum = typer.Option(
None,
help="Model to use for inference. Model will be superseded by checkpoint if provided.",
),
checkpoint: Path = typer.Option(
default=None,
exists=True,
help="Path to model checkpoint.",
),
save: bool = typer.Option(
None,
help="Whether to save out predictions. If you want to specify the output directory, use save_dir instead.",
),
save_dir: Path = typer.Option(
None,
help="An optional directory or csv path for saving the output. Defaults to `.csv` file in the working directory.",
),
results_file_name: Path = typer.Option(
None,
help="The filename for the output predictions in the save directory.",
),
results_file_format: ResultsFormat = typer.Option(
None,
help="The format in which to output the predictions. Currently `csv` and `megadetector` JSON formats are supported.",
),
config: Path | None = typer.Option(
None,
exists=True,
help="Specify options using yaml configuration file instead of through command line options.",
),
detections_threshold: float = typer.Option(
None,
help="An optional threshold for detector to detect animal on image. Defaults 0.2.",
),
weight_download_region: RegionEnum = typer.Option(
None, help="Server region for downloading weights."
),
overwrite: bool = typer.Option(
None, "--overwrite", "-o", help="Overwrite outputs in the save directory if they exist."
),
yes: bool = typer.Option(
False,
"--yes",
"-y",
help="Skip confirmation of configuration and proceed right to prediction.",
),
):
"""Identify species in an image. Option defaults are visible in the ImageClassificationPredictConfig class."""
predict_dict = consolidate_options(
ctx,
config_file=config,
config_key="predict_config",
mutators={"model": lambda x: ("model_name", x.value)},
to_drop=["config", "yes", "batch_size", "model"],
)
try:
image_config = ImageClassificationPredictConfig(**predict_dict)
except ValidationError as ex:
logger.error(f"Invalid configuration: {ex}")
raise typer.Exit(1)
msg = f"""The following configuration will be used for inference:
Filepath csv: {predict_dict["filepaths"] if "filepaths" in predict_dict else "Not provided"}
Data directory: {image_config.data_dir}
Model cache: {image_config.checkpoint}
Detections threshold: {image_config.detections_threshold}
"""
if yes:
typer.echo(f"{msg}\n\nSkipping confirmation and proceeding to prediction.")
else:
yes = typer.confirm(
f"{msg}\n\nIs this correct?",
abort=False,
default=True,
)
if yes:
# kick off prediction
manager = ZambaImagesManager()
manager.predict(image_config)
@app.command("train")
def train(
ctx: typer.Context,
data_dir: Path = typer.Option(
None,
exists=True,
file_okay=False,
dir_okay=True,
help="Path to the images directory.",
),
labels: Path = typer.Option(
None, exists=True, file_okay=True, dir_okay=False, help="Path to the labels."
),
model: ImageModelEnum = typer.Option(
None,
help="Model to fine-tune. Model will be superseded by checkpoint if provided.",
),
model_checkpoint: Path = typer.Option(
None,
exists=True,
help="Path to model checkpoint to resume training.",
),
config: Path | None = typer.Option(
None,
exists=True,
help="Specify options using yaml configuration file instead of through command line options.",
),
lr: float = typer.Option(None, help="Learning rate."),
batch_size: int = typer.Option(
None,
help="Batch size to use for training. With a value of 'None', we will try to find a maximal batch size that still fits within memory.",
),
accumulated_batch_size: int = typer.Option(
None, help="Accumulated batch size; will accumulate gradients to this virtual batch size."
),
max_epochs: int = typer.Option(None, help="Max training epochs."),
cache_dir: Path = typer.Option(
None,
file_okay=False,
dir_okay=True,
help="Path to the folder where clipped images will be saved. Applies only to training with images cropping (e.g. with bbox from coco format).",
),
save_dir: Path = typer.Option(
None,
help="An optional directory for saving the output. Defaults to the current working directory.",
exists=True,
file_okay=False,
dir_okay=True,
),
early_stopping_patience: int = typer.Option(
None,
help="Number of epochs with no improvement after which training will be stopped.",
),
image_size: int = typer.Option(None, help="Image size after resize."),
no_crop_images: bool = typer.Option(
None,
"--no-crop-images",
help="Pass if you have already cropped images to the bounding box of the animal.",
),
extra_train_augmentations: bool = typer.Option(
None,
"--extra-train-augmentations",
help="Use extra train augmentations.",
),
labels_format: BboxInputFormat = typer.Option(
None, help="Format for bounding box annotations when labels are provided as JSON."
),
num_workers: int = typer.Option(
None,
help="Number of subprocesses to use for data loading.",
),
devices: str = typer.Option(
None,
help="Pytorch Lightning devices parameter (number or which GPUs to use for training).",
),
accelerator: str = typer.Option(
None,
help="Pytorch Lightning accelerator parameter; e.g. 'cpu' or 'gpu' uses GPU if available.",
),
mlflow_tracking_uri: str = typer.Option(None, help="MLFlow tracking URI"),
mlflow_experiment_name: str = typer.Option(
None,
exists=True,
file_okay=False,
dir_okay=True,
help="Classification experiment name (MLFlow).",
),
checkpoint_path: str = typer.Option(None, help="Dir to save training checkpoints"),
from_scratch: bool = typer.Option(
None,
"--from-scratch",
help="Training from scratch.",
),
weighted_loss: bool = typer.Option(
None,
"--weighted-loss",
help="Use weighted cross entropy as loss.",
),
yes: bool = typer.Option(
False,
"--yes",
"-y",
help="Skip confirmation of configuration and proceed right to prediction.",
),
):
"""Train image classifier."""
skip_confirmation = (
os.getenv("LOCAL_RANK", 0) != 0
) # True if distributed training has already started (and this is second process)
training_dict = consolidate_options(
ctx,
config_file=config,
config_key="train_config",
mutators={
"model": lambda x: ("model_name", x.value),
"mlflow_experiment_name": lambda x: ("name", x),
"model_checkpoint": lambda x: ("checkpoint", x),
"no_crop_images": lambda x: ("crop_images", not x),
},
to_drop=[
"batch_size",
"config",
"mlflow_experiment_name",
"model",
"model_checkpoint",
"no_crop_images",
"yes",
],
)
required_args = ("data_dir", "labels")
for required_arg in required_args:
if training_dict[required_arg] is None:
raise RuntimeError(f"`{required_arg}` argument is required")
try:
image_config = ImageClassificationTrainingConfig(**training_dict)
except ValidationError as ex:
logger.error(f"Invalid configuration: {ex}")
raise typer.Exit(1)
# Only show confirmation on main process
if not skip_confirmation:
msg = f"""The following configuration will be used for training:
Filepath csv: {training_dict["labels"]}
Data directory: {training_dict["data_dir"]}
Base model name: {image_config.model_name}
Cache dir: {image_config.cache_dir}
Learning rate: {image_config.lr}
Batch size: {image_config.batch_size}
Accumulated batch size: {image_config.accumulated_batch_size}
Max epochs: {image_config.max_epochs}
Early stopping patience: {image_config.early_stopping_patience}
Num workers: {image_config.num_workers}
Accelerator: {image_config.accelerator}
Devices: {image_config.devices}
MLFlow tracking URI: {image_config.mlflow_tracking_uri}
MLFlow classification experiment name: {image_config.name}
Checkpoints dir: {image_config.checkpoint_path}
Model checkpoint: {image_config.checkpoint}
Weighted loss: {image_config.weighted_loss}
Extra train augmentations: {image_config.extra_train_augmentations}
"""
if yes:
typer.echo(f"{msg}\n\nSkipping confirmation and proceeding to prediction.")
else:
yes = typer.confirm(
f"{msg}\n\nIs this correct?",
abort=False,
default=True,
)
else:
yes = True # Non-main processes should always proceed
if yes:
# kick off training
manager = ZambaImagesManager()
manager.train(image_config)
| python | MIT | ce5d28ead22557e412d870780b0dbe3884bbd5cc | 2026-01-05T07:12:27.321318Z | false |
drivendataorg/zamba | https://github.com/drivendataorg/zamba/blob/ce5d28ead22557e412d870780b0dbe3884bbd5cc/zamba/__main__.py | zamba/__main__.py | from zamba.cli import app
# delegate to the Typer CLI; prog_name makes --help display the module invocation
app(prog_name="python -m zamba")
| python | MIT | ce5d28ead22557e412d870780b0dbe3884bbd5cc | 2026-01-05T07:12:27.321318Z | false |
drivendataorg/zamba | https://github.com/drivendataorg/zamba/blob/ce5d28ead22557e412d870780b0dbe3884bbd5cc/zamba/settings.py | zamba/settings.py | import os
from pathlib import Path
import appdirs
# video file extensions zamba treats as videos; override with a
# comma-separated VIDEO_SUFFIXES environment variable
VIDEO_SUFFIXES = os.environ.get("VIDEO_SUFFIXES")
if VIDEO_SUFFIXES is not None:
    VIDEO_SUFFIXES = VIDEO_SUFFIXES.split(",")
else:
    VIDEO_SUFFIXES = [".avi", ".mp4", ".asf"]

# random seed to use for splitting data without site info into train / val / holdout sets
# NOTE(review): when SPLIT_SEED comes from the environment it is a str, not an
# int like the default — confirm downstream consumers accept both
SPLIT_SEED = os.environ.get("SPLIT_SEED", 4007)

# experimental support for predicting on images
IMAGE_SUFFIXES = [
    ext.strip() for ext in os.environ.get("IMAGE_SUFFIXES", ".jpg,.jpeg,.png,.webp").split(",")
]
def get_model_cache_dir():
    """Return the directory used to cache downloaded model weights.

    The location can be overridden with the ``MODEL_CACHE_DIR`` environment
    variable; otherwise a ``zamba`` folder inside the user's OS cache
    directory (via appdirs) is used. The directory is created if needed.

    Returns:
        Path: Existing cache directory.
    """
    env_override = os.environ.get("MODEL_CACHE_DIR")
    if env_override is None:
        # only compute the appdirs default when no override is set; the
        # previous code eagerly evaluated it as dict.get's default argument
        model_cache_dir = Path(appdirs.user_cache_dir()) / "zamba"
    else:
        model_cache_dir = Path(env_override)
    model_cache_dir.mkdir(parents=True, exist_ok=True)
    return model_cache_dir
| python | MIT | ce5d28ead22557e412d870780b0dbe3884bbd5cc | 2026-01-05T07:12:27.321318Z | false |
drivendataorg/zamba | https://github.com/drivendataorg/zamba/blob/ce5d28ead22557e412d870780b0dbe3884bbd5cc/zamba/__init__.py | zamba/__init__.py | import os
from pathlib import Path
import sys
from loguru import logger
from zamba.version import __version__
# reference the imported name so zamba.__version__ is re-exported and the
# import above is not flagged as unused
__version__

# replace loguru's default sink with one whose level comes from LOG_LEVEL (default INFO)
logger.remove()
log_level = os.getenv("LOG_LEVEL", "INFO")
logger.add(sys.stderr, level=log_level)

# repo-relative directory holding the official model configs
MODELS_DIRECTORY = Path(__file__).parents[1] / "zamba" / "models" / "official_models"
| python | MIT | ce5d28ead22557e412d870780b0dbe3884bbd5cc | 2026-01-05T07:12:27.321318Z | false |
drivendataorg/zamba | https://github.com/drivendataorg/zamba/blob/ce5d28ead22557e412d870780b0dbe3884bbd5cc/zamba/utils_cli.py | zamba/utils_cli.py | from itertools import chain
from pathlib import Path
from loguru import logger
from tqdm import tqdm
import typer
import yaml
from zamba import MODELS_DIRECTORY
from zamba.images.config import ImageModelEnum
from zamba.models.config import ModelEnum
from zamba.models.densepose.densepose_manager import MODELS as DENSEPOSE_MODELS
from zamba.models.utils import download_weights
from zamba.settings import get_model_cache_dir
app = typer.Typer(pretty_exceptions_show_locals=False)
@app.command("dl-weights")
def dl_weights(
cache_dir: Path = typer.Option(
None,
exists=False,
help="Path to directory for downloading model weights. Alternatively, specify with environment variable `MODEL_CACHE_DIR`. If not specified, user's cache directory is used.",
),
overwrite: bool = typer.Option(
False,
"--overwrite",
"-o",
help="Overwrite existing weights in the cache directory.",
),
):
cache_dir = cache_dir or get_model_cache_dir()
weights_filenames = []
# get weights filenames for model files
model_names = list(chain(ModelEnum.__members__.keys(), ImageModelEnum.__members__.values()))
for model_name in model_names:
with (MODELS_DIRECTORY / model_name / "config.yaml").open() as f:
config_dict = yaml.safe_load(f)
weights_filenames.append(config_dict["public_checkpoint"])
# get weights filenames for densepose models
for model_name, model_config in DENSEPOSE_MODELS.items():
weights_filenames.append(model_config["weights"])
# download weights
for weights_filename in tqdm(weights_filenames, desc="Downloading weights for all models..."):
cache_path = cache_dir / weights_filename
if not overwrite and cache_path.exists():
logger.info(
f"Weights {weights_filename} already exist in {cache_path}, skipping. Use --overwrite to download again."
)
continue
logger.info(f"Downloading weights {weights_filename} to {cache_path}")
download_weights(weights_filename, cache_path)
| python | MIT | ce5d28ead22557e412d870780b0dbe3884bbd5cc | 2026-01-05T07:12:27.321318Z | false |
drivendataorg/zamba | https://github.com/drivendataorg/zamba/blob/ce5d28ead22557e412d870780b0dbe3884bbd5cc/zamba/pytorch_lightning/video_modules.py | zamba/pytorch_lightning/video_modules.py | from multiprocessing import cpu_count
from multiprocessing.context import BaseContext
from typing import Dict, Optional, Tuple, TYPE_CHECKING
import numpy as np
import pandas as pd
import torch
import torch.nn.functional as F
import torch.utils.data
from pytorch_lightning import LightningDataModule
from sklearn.metrics import accuracy_score, f1_score, top_k_accuracy_score
from torchvision.transforms import transforms
from zamba.metrics import compute_species_specific_metrics
from zamba.pytorch.dataloaders import get_datasets
from zamba.pytorch.transforms import ConvertTHWCtoCTHW
from zamba.pytorch_lightning.base_module import ZambaClassificationLightningModule
# default clip preprocessing: reorder (T, H, W, C) -> (C, T, H, W) and convert to float32
default_transform = transforms.Compose(
    [
        ConvertTHWCtoCTHW(),
        transforms.ConvertImageDtype(torch.float32),
    ]
)

# imported only for type checkers; avoids the runtime import of dataloaders here
if TYPE_CHECKING:
    from zamba.pytorch.dataloaders import VideoLoaderConfig

# candidate k values for top-k accuracy; filtered to k < num_classes at use sites
DEFAULT_TOP_K = (1, 3, 5, 10)
class ZambaVideoDataModule(LightningDataModule):
    """LightningDataModule that builds train/val/test/predict dataloaders for videos.

    Args:
        batch_size (int): Samples per batch.
        num_workers (int): Number of dataloader worker processes; defaults to
            all but one CPU.
        transform (transforms.Compose): Per-clip transform applied by the datasets.
        video_loader_config (VideoLoaderConfig, optional): Video decoding options.
        prefetch_factor (int): Batches prefetched per worker.
        train_metadata (pd.DataFrame, optional): Labeled metadata used to build
            the train/val/test datasets.
        predict_metadata (pd.DataFrame, optional): Unlabeled metadata used to
            build the predict dataset.
        multiprocessing_context (str, optional): Worker start method name;
            ignored when ``num_workers == 0``.
    """

    def __init__(
        self,
        batch_size: int = 1,
        num_workers: int = max(cpu_count() - 1, 1),
        transform: transforms.Compose = default_transform,
        video_loader_config: Optional["VideoLoaderConfig"] = None,
        prefetch_factor: int = 2,
        train_metadata: Optional[pd.DataFrame] = None,
        predict_metadata: Optional[pd.DataFrame] = None,
        multiprocessing_context: Optional[str] = "forkserver",
        *args,
        **kwargs,
    ):
        self.batch_size = batch_size
        self.num_workers = num_workers  # Number of parallel processes fetching data
        self.prefetch_factor = prefetch_factor
        # stored as a plain dict so it can be serialized alongside hparams
        self.video_loader_config = (
            None if video_loader_config is None else video_loader_config.dict()
        )
        self.train_metadata = train_metadata
        self.predict_metadata = predict_metadata

        (
            self.train_dataset,
            self.val_dataset,
            self.test_dataset,
            self.predict_dataset,
        ) = get_datasets(
            train_metadata=train_metadata,
            predict_metadata=predict_metadata,
            transform=transform,
            video_loader_config=video_loader_config,
        )

        # a custom start method is only meaningful when workers are used
        self.multiprocessing_context: BaseContext = (
            None
            if (multiprocessing_context is None) or (num_workers == 0)
            else multiprocessing_context
        )

        super().__init__(*args, **kwargs)

    def train_dataloader(self) -> Optional[torch.utils.data.DataLoader]:
        """Return a shuffled dataloader over the train split, if one exists."""
        if self.train_dataset:
            return torch.utils.data.DataLoader(
                self.train_dataset,
                batch_size=self.batch_size,
                num_workers=self.num_workers,
                shuffle=True,
                multiprocessing_context=self.multiprocessing_context,
                prefetch_factor=self.prefetch_factor,
                persistent_workers=self.num_workers > 0,
            )

    def val_dataloader(self) -> Optional[torch.utils.data.DataLoader]:
        """Return an ordered dataloader over the validation split, if one exists."""
        if self.val_dataset:
            return torch.utils.data.DataLoader(
                self.val_dataset,
                batch_size=self.batch_size,
                num_workers=self.num_workers,
                shuffle=False,
                multiprocessing_context=self.multiprocessing_context,
                prefetch_factor=self.prefetch_factor,
                persistent_workers=self.num_workers > 0,
            )

    def test_dataloader(self) -> Optional[torch.utils.data.DataLoader]:
        """Return an ordered dataloader over the test split, if one exists."""
        if self.test_dataset:
            return torch.utils.data.DataLoader(
                self.test_dataset,
                batch_size=self.batch_size,
                num_workers=self.num_workers,
                shuffle=False,
                multiprocessing_context=self.multiprocessing_context,
                prefetch_factor=self.prefetch_factor,
                persistent_workers=self.num_workers > 0,
            )

    def predict_dataloader(self) -> Optional[torch.utils.data.DataLoader]:
        """Return an ordered dataloader over the predict split, if one exists."""
        if self.predict_dataset:
            return torch.utils.data.DataLoader(
                self.predict_dataset,
                batch_size=self.batch_size,
                num_workers=self.num_workers,
                shuffle=False,
                multiprocessing_context=self.multiprocessing_context,
                prefetch_factor=self.prefetch_factor,
                # fix: previously hard-coded True, which makes DataLoader raise
                # ValueError when num_workers == 0; match the other loaders
                persistent_workers=self.num_workers > 0,
            )
class ZambaVideoClassificationLightningModule(ZambaClassificationLightningModule):
    """Adds multi-label training, validation, and test logic for video classifiers."""

    def on_train_start(self):
        # register the metrics we plan to log so they appear in the TensorBoard
        # hparams tab; top-k accuracy only applies with more than two classes
        metrics = {"val_macro_f1": {}}

        if self.num_classes > 2:
            metrics.update(
                {f"val_top_{k}_accuracy": {} for k in DEFAULT_TOP_K if k < self.num_classes}
            )
        else:
            metrics.update({"val_accuracy": {}})

        # write hparams to hparams.yaml file, log metrics to tb hparams tab
        self.logger.log_hyperparams(self.hparams, metrics)

    def training_step(self, batch, batch_idx):
        # multi-label objective: independent sigmoid per species label
        x, y = batch
        y_hat = self(x)
        loss = F.binary_cross_entropy_with_logits(y_hat, y)
        self.log("train_loss", loss.detach())
        self.training_step_outputs.append(loss)
        return loss

    def _val_step(self, batch, batch_idx):
        # shared body for validation_step and test_step
        # NOTE(review): the loss is logged as "val_loss" even when called from
        # test_step — confirm this is intended
        x, y = batch
        y_hat = self(x)
        loss = F.binary_cross_entropy_with_logits(y_hat, y)
        self.log("val_loss", loss.detach())

        y_proba = torch.sigmoid(y_hat.cpu()).numpy()
        return {
            "y_true": y.cpu().numpy().astype(int),
            # hard predictions at a 0.5 threshold (round of the sigmoid)
            "y_pred": y_proba.round().astype(int),
            "y_proba": y_proba,
        }

    def validation_step(self, batch, batch_idx):
        output = self._val_step(batch, batch_idx)
        self.validation_step_outputs.append(output)
        return output

    def test_step(self, batch, batch_idx):
        output = self._val_step(batch, batch_idx)
        self.test_step_outputs.append(output)
        return output

    @staticmethod
    def aggregate_step_outputs(
        outputs: Dict[str, np.ndarray],
    ) -> Tuple[np.ndarray, np.ndarray, np.ndarray]:
        """Stack per-step output dicts into (y_true, y_pred, y_proba) arrays."""
        y_true = np.vstack([output["y_true"] for output in outputs])
        y_pred = np.vstack([output["y_pred"] for output in outputs])
        y_proba = np.vstack([output["y_proba"] for output in outputs])

        return y_true, y_pred, y_proba

    def compute_and_log_metrics(
        self, y_true: np.ndarray, y_pred: np.ndarray, y_proba: np.ndarray, subset: str
    ):
        """Log macro F1, (top-k) accuracy, and per-species metrics for `subset`."""
        self.log(
            f"{subset}_macro_f1",
            f1_score(y_true, y_pred, average="macro", zero_division=0),
        )

        # if only two classes, skip top_k accuracy since not enough classes
        if self.num_classes > 2:
            for k in DEFAULT_TOP_K:
                if k < self.num_classes:
                    self.log(
                        f"{subset}_top_{k}_accuracy",
                        top_k_accuracy_score(
                            y_true.argmax(
                                axis=1
                            ),  # top k accuracy only supports single label case
                            y_proba,
                            labels=np.arange(y_proba.shape[1]),
                            k=k,
                        ),
                    )
        else:
            self.log(f"{subset}_accuracy", accuracy_score(y_true, y_pred))

        for metric_name, label, metric in compute_species_specific_metrics(
            y_true, y_pred, self.species
        ):
            self.log(f"species/{subset}_{metric_name}/{label}", metric)

    def on_validation_epoch_end(self):
        """Aggregates validation_step outputs to compute and log the validation macro F1 and top K
        metrics.

        Args:
            outputs (List[dict]): list of output dictionaries from each validation step
                containing y_pred and y_true.
        """
        y_true, y_pred, y_proba = self.aggregate_step_outputs(self.validation_step_outputs)
        self.compute_and_log_metrics(y_true, y_pred, y_proba, subset="val")
        self.validation_step_outputs.clear()  # free memory

    def on_test_epoch_end(self):
        # same aggregation as validation, logged under the "test" prefix
        y_true, y_pred, y_proba = self.aggregate_step_outputs(self.test_step_outputs)
        self.compute_and_log_metrics(y_true, y_pred, y_proba, subset="test")
        self.test_step_outputs.clear()  # free memory

    def predict_step(self, batch, batch_idx, dataloader_idx: Optional[int] = None):
        # return per-class sigmoid probabilities as a numpy array
        x, y = batch
        y_hat = self(x)
        pred = torch.sigmoid(y_hat).cpu().numpy()
        return pred
| python | MIT | ce5d28ead22557e412d870780b0dbe3884bbd5cc | 2026-01-05T07:12:27.321318Z | false |
drivendataorg/zamba | https://github.com/drivendataorg/zamba/blob/ce5d28ead22557e412d870780b0dbe3884bbd5cc/zamba/pytorch_lightning/__init__.py | zamba/pytorch_lightning/__init__.py | python | MIT | ce5d28ead22557e412d870780b0dbe3884bbd5cc | 2026-01-05T07:12:27.321318Z | false | |
drivendataorg/zamba | https://github.com/drivendataorg/zamba/blob/ce5d28ead22557e412d870780b0dbe3884bbd5cc/zamba/pytorch_lightning/base_module.py | zamba/pytorch_lightning/base_module.py | import os
import warnings
from typing import List, Optional
import pytorch_lightning as pl
import torch
import torch.utils.data
from pytorch_lightning import LightningModule
from torchvision.transforms import transforms
from zamba.pytorch.transforms import ConvertTHWCtoCTHW
# default clip preprocessing: reorder (T, H, W, C) -> (C, T, H, W) and convert to float32
default_transform = transforms.Compose(
    [
        ConvertTHWCtoCTHW(),
        transforms.ConvertImageDtype(torch.float32),
    ]
)
class ZambaClassificationLightningModule(LightningModule):
    """Base LightningModule for zamba classifiers.

    Subclasses are expected to set ``self.model``; ``forward`` delegates to it.

    Args:
        species (list of str): Class labels; their count defines ``num_classes``.
        lr (float): Learning rate for the Adam optimizer.
        scheduler (str, optional): Name of a class in ``torch.optim.lr_scheduler``.
        scheduler_params (dict, optional): Kwargs passed to the scheduler;
            has no effect unless ``scheduler`` is also given.
    """

    def __init__(
        self,
        species: List[str],
        lr: float = 1e-3,
        scheduler: Optional[str] = None,
        scheduler_params: Optional[dict] = None,
        **kwargs,
    ):
        super().__init__()

        if (scheduler is None) and (scheduler_params is not None):
            warnings.warn(
                "scheduler_params provided without scheduler. scheduler_params will have no effect."
            )

        self.lr = lr
        self.species = species
        self.num_classes = len(species)

        # resolve the scheduler class by name from torch.optim.lr_scheduler
        if scheduler is not None:
            self.scheduler = torch.optim.lr_scheduler.__dict__[scheduler]
        else:
            self.scheduler = scheduler

        self.scheduler_params = scheduler_params
        self.model_class = type(self).__name__

        # save_hyperparameters captures these constructor arguments by name
        self.save_hyperparameters("lr", "scheduler", "scheduler_params", "species")
        self.hparams["model_class"] = self.model_class

        # per-epoch step outputs; cleared by subclasses in *_epoch_end hooks
        self.training_step_outputs = []
        self.validation_step_outputs = []
        self.test_step_outputs = []

    def forward(self, x):
        # delegate to the wrapped model (set by subclasses)
        return self.model(x)

    def _get_optimizer(self):
        # only optimize parameters that are not frozen
        return torch.optim.Adam(filter(lambda p: p.requires_grad, self.parameters()), lr=self.lr)

    def configure_optimizers(self):
        """
        Setup the Adam optimizer. Note, that this function also can return a lr scheduler, which is
        usually useful for training video models.
        """
        optim = self._get_optimizer()

        if self.scheduler is None:
            return optim
        else:
            return {
                "optimizer": optim,
                "lr_scheduler": self.scheduler(
                    optim, **({} if self.scheduler_params is None else self.scheduler_params)
                ),
            }

    def to_disk(self, path: os.PathLike):
        """Save out model weights to a checkpoint file on disk.

        Note: this does not include callbacks, optimizer_states, or lr_schedulers.
        To include those, use `Trainer.save_checkpoint()` instead.
        """
        checkpoint = {
            "state_dict": self.state_dict(),
            "hyper_parameters": self.hparams,
            "global_step": self.global_step,
            "pytorch-lightning_version": pl.__version__,
        }
        torch.save(checkpoint, path)

    @classmethod
    def from_disk(cls, path: os.PathLike, **kwargs):
        # note: we always load models onto CPU; moving to GPU is handled by `devices` in pl.Trainer
        return cls.load_from_checkpoint(path, map_location="cpu", **kwargs)
| python | MIT | ce5d28ead22557e412d870780b0dbe3884bbd5cc | 2026-01-05T07:12:27.321318Z | false |
drivendataorg/zamba | https://github.com/drivendataorg/zamba/blob/ce5d28ead22557e412d870780b0dbe3884bbd5cc/zamba/models/publish_models.py | zamba/models/publish_models.py | import hashlib
from pathlib import Path
from tempfile import TemporaryDirectory
import urllib.request
from cloudpathlib import AnyPath, S3Path
from loguru import logger
import yaml
from zamba import MODELS_DIRECTORY
from zamba.models.config import WEIGHT_LOOKUP, ModelEnum
from zamba.models.densepose import MODELS as DENSEPOSE_MODELS
from zamba.models.depth_estimation import MODELS as DEPTH_MODELS
def get_model_only_params(full_configuration, subset="train_config"):
    """Return only params that are not data or machine specific.

    Used for generating official configs.

    Args:
        full_configuration (dict): Full configuration, e.g. parsed from a
            train_configuration.yaml file.
        subset (str): Sub-config to extract; either "train_config" or
            "video_loader_config".

    Returns:
        dict: The sub-config with machine/data specific keys removed. Note
            that the sub-dict inside ``full_configuration`` is modified in
            place.
    """
    if subset == "train_config":
        config = full_configuration[subset]
        # drop keys describing the training machine/run rather than the model;
        # pop with a default replaces the previous bare try/except (E722)
        for key in [
            "data_dir",
            "dry_run",
            "batch_size",
            "auto_lr_find",
            "gpus",
            "num_workers",
            "max_epochs",
            "weight_download_region",
            "split_proportions",
            "save_dir",
            "overwrite",
            "skip_load_validation",
            "from_scratch",
            "model_cache_dir",
            "use_default_model_labels",
            "predict_all_zamba_species",
        ]:
            config.pop(key, None)

    elif subset == "video_loader_config":
        config = full_configuration[subset]
        # the detection device is machine specific
        if "megadetector_lite_config" in config.keys():
            config["megadetector_lite_config"].pop("device")
        # cache settings are machine specific (expected to always be present)
        for key in ["cache_dir", "cleanup_cache"]:
            config.pop(key)

    return config
def publish_model(model_name, trained_model_dir):
    """
    Creates the files for the model folder in `official_models` and uploads the model to the three
    DrivenData public s3 buckets.

    Args:
        model_name (ModelEnum): Model name which will be folder name in `official_models`.
        trained_model_dir (AnyPath): Directory containing model checkpoint file,
            train_configuration.yaml, predict_configuration.yaml, config.yaml, hparams.yaml, and
            val_metrics.json.
    """
    # exactly one checkpoint must exist anywhere under the model directory
    checkpoints = list(AnyPath(trained_model_dir).rglob("*.ckpt"))
    if len(checkpoints) > 1:
        raise ValueError(
            f"{len(checkpoints)} were found in {trained_model_dir}. There can only be one. Checkpoints found include: {checkpoints}"
        )
    elif len(checkpoints) == 0:
        raise ValueError(f"No checkpoint files were found in {trained_model_dir}.")
    else:
        private_checkpoint = checkpoints[0]
        # configs are expected to be in the same folder as model checkpoint
        trained_model_dir = AnyPath(private_checkpoint).parent

    # make model directory
    (MODELS_DIRECTORY / model_name).mkdir(exist_ok=True, parents=True)

    # copy over files from model directory
    logger.info(
        f"Copying over yaml and json files from {trained_model_dir} to {MODELS_DIRECTORY / model_name}."
    )
    for file in [
        "train_configuration.yaml",
        "predict_configuration.yaml",
        "config.yaml",
        "hparams.yaml",
        "val_metrics.json",
    ]:
        (AnyPath(trained_model_dir) / file).copy(MODELS_DIRECTORY / model_name)

    # prepare config for use in official models dir
    logger.info("Preparing official config file.")

    # start with full train configuration
    with (MODELS_DIRECTORY / model_name / "train_configuration.yaml").open() as f:
        train_configuration_full_dict = yaml.safe_load(f)

    # get limited train config
    train_config = get_model_only_params(train_configuration_full_dict, subset="train_config")

    # e.g. european model is trained from a checkpoint; we want to expose final model
    # (model_name: european) not the base checkpoint
    if "checkpoint" in train_config.keys():
        train_config.pop("checkpoint")
    train_config["model_name"] = model_name

    official_config = dict(
        train_config=train_config,
        video_loader_config=get_model_only_params(
            train_configuration_full_dict, subset="video_loader_config"
        ),
        predict_config=dict(model_name=model_name),
    )

    # hash train_configuration to generate public filename for model
    hash_str = hashlib.sha1(str(train_configuration_full_dict).encode("utf-8")).hexdigest()[:10]
    public_file_name = f"{model_name}_{hash_str}.ckpt"

    # add that to official config
    official_config["public_checkpoint"] = public_file_name

    # write out official config
    config_yaml = MODELS_DIRECTORY / model_name / "config.yaml"
    logger.info(f"Writing out to {config_yaml}")
    with config_yaml.open("w") as f:
        yaml.dump(official_config, f, sort_keys=False)

    upload_to_all_public_buckets(private_checkpoint, public_file_name)
def upload_to_all_public_buckets(file, public_file_name):
    """Mirror `file` to the US, EU, and Asia public asset buckets.

    For each regional bucket, the upload is skipped when an object with the
    target name already exists; otherwise the file is force-uploaded.
    """
    for region_suffix in ("", "-eu", "-asia"):
        destination = S3Path(
            f"s3://drivendata-public-assets{region_suffix}/zamba_official_models/{public_file_name}"
        )
        if destination.exists():
            logger.info(f"Skipping since {destination} exists.")
            continue
        logger.info(f"Uploading {file} to {destination}")
        destination.upload_from(file, force_overwrite_to_cloud=True)
if __name__ == "__main__":
    # Publish every official classification model: copy its configs into the
    # models directory and mirror its checkpoint to the public buckets.
    for model_name in ModelEnum.__members__.keys():
        private_checkpoint = WEIGHT_LOOKUP[model_name]
        logger.info(f"\n============\nPreparing {model_name} model\n============")
        publish_model(model_name, private_checkpoint)

    # Depth models already have private weights on S3; just mirror them publicly.
    for name, model in DEPTH_MODELS.items():
        logger.info(f"\n============\nPreparing {name} model\n============")
        # upload to the zamba buckets, renaming to model["weights"]
        upload_to_all_public_buckets(S3Path(model["private_weights_url"]), model["weights"])

    # DensePose weights are fetched from Facebook's servers, then mirrored.
    for name, model in DENSEPOSE_MODELS.items():
        logger.info(f"\n============\nPreparing DensePose model: {name}\n============")
        # NOTE: only the main (US) bucket is checked here; the regional buckets
        # are skipped per-object inside upload_to_all_public_buckets anyway
        if S3Path(
            f"s3://drivendata-public-assets/zamba_official_models/{model['weights']}"
        ).exists():
            logger.info("Skipping since model exists on main public S3 bucket.")
            continue

        with TemporaryDirectory() as tmpdir:
            tmpdir = Path(tmpdir)
            tmp_download_path = tmpdir / model["weights"]

            # download from the facebook servers
            logger.info(f"Downloading weights: {model['densepose_weights_url']}")
            urllib.request.urlretrieve(model["densepose_weights_url"], tmp_download_path)

            # upload to the zamba buckets, renaming to model["weights"]
            logger.info(f"Uploading to zamba buckets: {model['weights']}")
            upload_to_all_public_buckets(tmp_download_path, model["weights"])

            # remove local temp file that was downloaded
            tmp_download_path.unlink()
| python | MIT | ce5d28ead22557e412d870780b0dbe3884bbd5cc | 2026-01-05T07:12:27.321318Z | false |
drivendataorg/zamba | https://github.com/drivendataorg/zamba/blob/ce5d28ead22557e412d870780b0dbe3884bbd5cc/zamba/models/registry.py | zamba/models/registry.py | from zamba.pytorch_lightning.base_module import ZambaClassificationLightningModule
# Registry mapping class name -> model class; populated by @register_model below.
available_models = {}
def register_model(cls):
    """Class decorator that records ZambaClassificationLightningModule
    subclasses in the module-level `available_models` registry, keyed by
    class name. Returns the class unchanged so it can be used as a decorator.

    Raises:
        TypeError: If `cls` is not a ZambaClassificationLightningModule subclass.
    """
    if issubclass(cls, ZambaClassificationLightningModule):
        available_models[cls.__name__] = cls
        return cls
    raise TypeError(
        "Cannot register object that is not a subclass of "
        "ZambaClassificationLightningModule."
    )
| python | MIT | ce5d28ead22557e412d870780b0dbe3884bbd5cc | 2026-01-05T07:12:27.321318Z | false |
drivendataorg/zamba | https://github.com/drivendataorg/zamba/blob/ce5d28ead22557e412d870780b0dbe3884bbd5cc/zamba/models/slowfast_models.py | zamba/models/slowfast_models.py | import os
from typing import Optional, Tuple, Union
from pytorchvideo.models.head import ResNetBasicHead
import torch
from zamba.models.registry import register_model
from zamba.pytorch.utils import build_multilayer_perceptron
from zamba.pytorch_lightning.video_modules import ZambaVideoClassificationLightningModule
@register_model
class SlowFast(ZambaVideoClassificationLightningModule):
    """Pretrained SlowFast model for fine-tuning with the following architecture:

    Input -> SlowFast Base (including trainable Backbone) -> Res Basic Head -> Output

    Attributes:
        backbone (torch.nn.Module): When scheduling the backbone to train with the
            `BackboneFinetune` callback, this indicates the trainable part of the base.
        base (torch.nn.Module): The entire model prior to the head.
        head (torch.nn.Module): The trainable head.
        _backbone_output_dim (int): Dimensionality of the backbone output (and head input).
    """

    _default_model_name = "slowfast"  # used to look up default configuration for checkpoints

    def __init__(
        self,
        backbone_mode: str = "train",
        post_backbone_dropout: Optional[float] = None,
        output_with_global_average: bool = True,
        head_dropout_rate: Optional[float] = None,
        head_hidden_layer_sizes: Optional[Tuple[int]] = None,
        finetune_from: Optional[Union[os.PathLike, str]] = None,
        **kwargs,
    ):
        """Initializes the SlowFast model.

        Args:
            backbone_mode (str): If "eval", treat the backbone as a feature extractor
                and set to evaluation mode in all forward passes.
            post_backbone_dropout (float, optional): Dropout that operates on the output of the
                backbone + pool (before the fully-connected layer in the head).
            output_with_global_average (bool): If True, apply an adaptive average pooling
                operation after the fully-connected layer in the head.
            head_dropout_rate (float, optional): Optional dropout rate applied after backbone and
                between projection layers in the head.
            head_hidden_layer_sizes (tuple of int): If not None, the size of hidden layers in the
                head multilayer perceptron.
            finetune_from (pathlike or str, optional): If not None, load an existing model from
                the path and resume training from an existing model.
        """
        super().__init__(**kwargs)

        if finetune_from is None:
            # fresh pretrained base downloaded from torchhub
            self.initialize_from_torchub()
        else:
            # reuse the base of a previously trained zamba model, fully frozen
            model = self.from_disk(finetune_from)
            self._backbone_output_dim = model.head.proj.in_features
            self.backbone = model.backbone
            self.base = model.base
            for param in self.base.parameters():
                param.requires_grad = False

        # new trainable head replacing the pretrained classification head
        head = ResNetBasicHead(
            proj=build_multilayer_perceptron(
                self._backbone_output_dim,
                head_hidden_layer_sizes,
                self.num_classes,
                activation=torch.nn.ReLU,
                dropout=head_dropout_rate,
                output_activation=None,
            ),
            activation=None,
            pool=None,
            dropout=(
                None if post_backbone_dropout is None else torch.nn.Dropout(post_backbone_dropout)
            ),
            output_pool=torch.nn.AdaptiveAvgPool3d(1),
        )

        self.backbone_mode = backbone_mode
        self.head = head

        self.save_hyperparameters(
            "backbone_mode",
            "head_dropout_rate",
            "head_hidden_layer_sizes",
            "output_with_global_average",
            "post_backbone_dropout",
        )

    def initialize_from_torchub(self):
        """Loads SlowFast model from torchhub and prepares ZambaVideoClassificationLightningModule
        by removing the head and setting the backbone and base."""
        # workaround for pytorch bug
        torch.hub._validate_not_a_forked_repo = lambda a, b, c: True

        base = torch.hub.load(
            "facebookresearch/pytorchvideo:0.1.3", model="slowfast_r50", pretrained=True
        )
        self._backbone_output_dim = base.blocks[-1].proj.in_features

        base.blocks = base.blocks[:-1]  # Remove the pre-trained head

        # self.backbone attribute lets `BackboneFinetune` freeze and unfreeze that module
        self.backbone = base.blocks[-2:]
        self.base = base

    def forward(self, x, *args, **kwargs):
        # optionally keep the base as a fixed feature extractor
        if self.backbone_mode == "eval":
            self.base.eval()

        x = self.base(x)
        return self.head(x)
| python | MIT | ce5d28ead22557e412d870780b0dbe3884bbd5cc | 2026-01-05T07:12:27.321318Z | false |
drivendataorg/zamba | https://github.com/drivendataorg/zamba/blob/ce5d28ead22557e412d870780b0dbe3884bbd5cc/zamba/models/efficientnet_models.py | zamba/models/efficientnet_models.py | import os
from typing import Optional, Union
import timm
import torch
from torch import nn
from zamba.models.registry import register_model
from zamba.pytorch.layers import TimeDistributed
from zamba.pytorch_lightning.video_modules import ZambaVideoClassificationLightningModule
@register_model
class TimeDistributedEfficientNet(ZambaVideoClassificationLightningModule):
    """EfficientNetV2 backbone applied independently to each frame (via
    TimeDistributed), followed by a small trainable classifier that flattens
    the per-frame features and maps them to class logits.

    Attributes:
        backbone (torch.nn.ModuleList): Final EfficientNet blocks, exposed so a
            backbone-finetuning callback can unfreeze them on schedule.
        base (TimeDistributed): Frozen per-frame EfficientNet feature extractor.
        classifier (torch.nn.Sequential): Trainable classification head.
    """

    _default_model_name = (
        "time_distributed"  # used to look up default configuration for checkpoints
    )

    def __init__(
        self,
        num_frames=16,
        finetune_from: Optional[Union[os.PathLike, str]] = None,
        **kwargs,
    ):
        """Initializes the model.

        Args:
            num_frames (int): Number of frames per video the classifier expects.
            finetune_from (pathlike or str, optional): If not None, load an existing
                model from this path and reuse its base as the feature extractor.
        """
        super().__init__(**kwargs)

        if finetune_from is None:
            # pretrained EfficientNetV2 with its image classifier removed
            efficientnet = timm.create_model("efficientnetv2_rw_m", pretrained=True)
            efficientnet.classifier = nn.Identity()
        else:
            efficientnet = self.from_disk(finetune_from).base.module

        # freeze base layers
        for param in efficientnet.parameters():
            param.requires_grad = False

        num_backbone_final_features = efficientnet.num_features

        # final blocks of the network, exposed for scheduled unfreezing
        self.backbone = torch.nn.ModuleList(
            [
                efficientnet.get_submodule("blocks.5"),
                efficientnet.conv_head,
                efficientnet.bn2,
                efficientnet.global_pool,
            ]
        )

        self.base = TimeDistributed(efficientnet, tdim=1)
        self.classifier = nn.Sequential(
            nn.Linear(num_backbone_final_features, 256),
            nn.Dropout(0.2),
            nn.ReLU(),
            nn.Linear(256, 64),
            # flatten across the frame dimension before the final projection
            nn.Flatten(),
            nn.Linear(64 * num_frames, self.num_classes),
        )

        self.save_hyperparameters("num_frames")

    def forward(self, x):
        # the base is always run in eval mode (frozen feature extractor)
        self.base.eval()
        x = self.base(x)
        return self.classifier(x)
| python | MIT | ce5d28ead22557e412d870780b0dbe3884bbd5cc | 2026-01-05T07:12:27.321318Z | false |
drivendataorg/zamba | https://github.com/drivendataorg/zamba/blob/ce5d28ead22557e412d870780b0dbe3884bbd5cc/zamba/models/utils.py | zamba/models/utils.py | import copy
from enum import Enum
from functools import lru_cache
import os
from pathlib import Path
from typing import Union
from cloudpathlib import S3Client, S3Path
import torch
import yaml
from zamba import MODELS_DIRECTORY
# Main public bucket holding official zamba model weights; regional mirrors
# append a "-{region}" suffix (see download_weights).
S3_BUCKET = "s3://drivendata-public-assets"
class RegionEnum(str, Enum):
    """Regions with a public weights mirror; selects which S3 bucket
    model weights are downloaded from."""

    us = "us"
    eu = "eu"
    asia = "asia"
def download_weights(
    filename: str,
    destination_dir: Union[os.PathLike, str],
    weight_region: RegionEnum = RegionEnum("us"),
) -> str:
    """Download a public model checkpoint from the regional S3 bucket.

    Args:
        filename (str): Name of the checkpoint file under the
            `zamba_official_models/` prefix of the public bucket.
        destination_dir (pathlike or str): Local directory to download into.
        weight_region (RegionEnum): Which regional mirror to download from
            ("us", "eu", or "asia"). Defaults to "us".

    Returns:
        str: Local path of the downloaded checkpoint.
    """
    # get s3 bucket based on region; the US bucket has no suffix
    if weight_region != "us":
        region_bucket = f"{S3_BUCKET}-{weight_region}"
    else:
        region_bucket = S3_BUCKET

    # BUG FIX: the object key must interpolate the requested filename
    # (previously a literal placeholder left the `filename` param unused)
    s3p = S3Path(
        f"{region_bucket}/zamba_official_models/{filename}",
        client=S3Client(local_cache_dir=destination_dir, no_sign_request=True),
    )
    s3p.download_to(destination_dir)
    # return annotation corrected to str to match the actual return value
    return str(Path(destination_dir) / s3p.name)
def get_model_checkpoint_filename(model_name):
    """Look up the public checkpoint filename for a model from its official
    config.yaml in the models directory."""
    name = model_name.value if isinstance(model_name, Enum) else model_name
    with (MODELS_DIRECTORY / name / "config.yaml").open() as f:
        official_config = yaml.safe_load(f)
    return Path(official_config["public_checkpoint"])
def get_default_hparams(model):
    """Load the default hyperparameters shipped with an official model."""
    name = model.value if isinstance(model, Enum) else model
    with (MODELS_DIRECTORY / name / "hparams.yaml").open() as f:
        return yaml.safe_load(f)
def get_checkpoint_hparams(checkpoint):
    """Return a defensive deep copy of a checkpoint's hyperparameters so
    callers can mutate the result without corrupting the lru cache."""
    cached = _cached_hparams(checkpoint)
    return copy.deepcopy(cached)
@lru_cache()
def _cached_hparams(checkpoint):
    # Load only the "hyper_parameters" entry of a Lightning checkpoint.
    # lru_cache avoids re-reading the (potentially large) file on repeated
    # calls; get_checkpoint_hparams deep-copies the result so the cached
    # dict is never mutated. map_location keeps the load CPU-only.
    return torch.load(checkpoint, weights_only=False, map_location=torch.device("cpu"))[
        "hyper_parameters"
    ]
def get_model_species(checkpoint, model_name):
    """Return the species list for a model.

    A checkpoint's stored hparams take precedence; otherwise fall back to the
    default hparams for the named base model.
    """
    if checkpoint is not None:
        return get_checkpoint_hparams(checkpoint)["species"]
    return get_default_hparams(model_name)["species"]
def configure_accelerator_and_devices_from_gpus(gpus):
    """Derive accelerator and number of devices for pl.Trainer from user-specified number of gpus."""
    use_gpu = gpus > 0
    accelerator = "gpu" if use_gpu else "cpu"
    devices = gpus if use_gpu else "auto"
    return accelerator, devices
| python | MIT | ce5d28ead22557e412d870780b0dbe3884bbd5cc | 2026-01-05T07:12:27.321318Z | false |
drivendataorg/zamba | https://github.com/drivendataorg/zamba/blob/ce5d28ead22557e412d870780b0dbe3884bbd5cc/zamba/models/model_manager.py | zamba/models/model_manager.py | from datetime import datetime
import json
import os
from pathlib import Path
from typing import Optional
import yaml
import git
from loguru import logger
import numpy as np
import pandas as pd
import pytorch_lightning as pl
from pytorch_lightning.callbacks import EarlyStopping, ModelCheckpoint
from pytorch_lightning.loggers import TensorBoardLogger
from pytorch_lightning.strategies import DDPStrategy
from pytorch_lightning.tuner import Tuner
from zamba.data.video import VideoLoaderConfig
from zamba.models.config import (
ModelConfig,
ModelEnum,
MODEL_MAPPING,
SchedulerConfig,
TrainConfig,
PredictConfig,
)
from zamba.models.registry import available_models
from zamba.models.utils import (
configure_accelerator_and_devices_from_gpus,
get_checkpoint_hparams,
get_default_hparams,
)
from zamba.pytorch.finetuning import BackboneFinetuning
from zamba.pytorch_lightning.video_modules import (
ZambaVideoDataModule,
ZambaVideoClassificationLightningModule,
)
def instantiate_model(
    checkpoint: os.PathLike,
    labels: Optional[pd.DataFrame] = None,
    scheduler_config: Optional[SchedulerConfig] = None,
    from_scratch: Optional[bool] = None,
    model_name: Optional[ModelEnum] = None,
    use_default_model_labels: Optional[bool] = None,
    species: Optional[list] = None,
) -> ZambaVideoClassificationLightningModule:
    """Instantiates the model from a checkpoint and detects whether the model head should be replaced.

    The model head is replaced if labels contain species that are not on the model or use_default_model_labels=False.

    Supports model instantiation for the following cases:
    - train from scratch (from_scratch=True)
    - finetune with new species (from_scratch=False, labels contains different species than model)
    - finetune with a subset of zamba species and output only the species in the labels file (use_default_model_labels=False)
    - finetune with a subset of zamba species but output all zamba species (use_default_model_labels=True)
    - predict using pretrained model (labels=None)

    Args:
        checkpoint (path): Path to a checkpoint on disk.
        labels (pd.DataFrame, optional): Dataframe where filepath is the index and columns are one hot encoded species.
        scheduler_config (SchedulerConfig, optional): SchedulerConfig to use for training or finetuning.
            Only used if labels is not None.
        from_scratch (bool, optional): Whether to instantiate the model with base weights. This means starting
            from the imagenet weights for image based models and the Kinetics weights for video models.
            Only used if labels is not None.
        model_name (ModelEnum, optional): Model name used to look up default hparams used for that model.
            Only relevant if training from scratch.
        use_default_model_labels (bool, optional): Whether to output the full set of default model labels rather than
            just the species in the labels file. Only used if labels is not None.
        species (list, optional): List of species in label order. If None, read from labels file.

    Returns:
        ZambaVideoClassificationLightningModule: Instantiated model
    """
    if from_scratch:
        hparams = get_default_hparams(model_name)
    else:
        hparams = get_checkpoint_hparams(checkpoint)

    model_class = available_models[hparams["model_class"]]
    logger.info(f"Instantiating model: {model_class.__name__}")

    # predicting
    if labels is None:
        logger.info("Loading from checkpoint.")
        model = model_class.from_disk(path=checkpoint, **hparams)
        return model

    # get species from labels file; columns are one-hot "species_{name}"
    if species is None:
        species = labels.filter(regex=r"^species_").columns.tolist()
        species = [s.split("species_", 1)[1] for s in species]

    # train from scratch
    if from_scratch:
        logger.info("Training from scratch.")

        # default would use scheduler used for pretrained model
        if scheduler_config != "default":
            hparams.update(scheduler_config.dict())

        hparams.update({"species": species})
        model = model_class(**hparams)
        log_schedulers(model)
        return model

    # determine if finetuning or resuming training
    # check if species in label file are a subset of pretrained model species
    is_subset = set(species).issubset(set(hparams["species"]))

    if is_subset:
        if use_default_model_labels:
            return resume_training(
                scheduler_config=scheduler_config,
                hparams=hparams,
                model_class=model_class,
                checkpoint=checkpoint,
            )

        else:
            logger.info(
                "Limiting only to species in labels file. Replacing model head and finetuning."
            )
            return replace_head(
                scheduler_config=scheduler_config,
                hparams=hparams,
                species=species,
                model_class=model_class,
                checkpoint=checkpoint,
            )

    # without a subset, you will always get a new head
    # the config validation prohibits setting use_default_model_labels to True without a subset
    else:
        logger.info(
            "Provided species do not fully overlap with Zamba species. Replacing model head and finetuning."
        )
        return replace_head(
            scheduler_config=scheduler_config,
            hparams=hparams,
            species=species,
            model_class=model_class,
            checkpoint=checkpoint,
        )
def replace_head(scheduler_config, hparams, species, model_class, checkpoint):
    """Build a finetuning model from `checkpoint` whose classification head
    is replaced to predict exactly `species`. Mutates `hparams` in place."""
    # a non-"default" scheduler config overrides the one on the checkpoint
    if scheduler_config != "default":
        hparams.update(scheduler_config.dict())
    hparams["species"] = species

    new_model = model_class(finetune_from=checkpoint, **hparams)
    log_schedulers(new_model)
    return new_model
def resume_training(
    scheduler_config,
    hparams,
    model_class,
    checkpoint,
):
    """Load the checkpoint as-is (same head/species) and continue training.
    Mutates `hparams` in place when a non-default scheduler is requested."""
    logger.info(
        "Provided species fully overlap with Zamba species. Resuming training from latest checkpoint."
    )
    # a non-"default" scheduler config overrides the one on the checkpoint
    if scheduler_config != "default":
        hparams.update(scheduler_config.dict())

    resumed = model_class.from_disk(path=checkpoint, **hparams)
    log_schedulers(resumed)
    return resumed
def log_schedulers(model):
    """Log which learning-rate scheduler (and params) the model will use."""
    scheduler = model.hparams["scheduler"]
    scheduler_params = model.hparams["scheduler_params"]
    logger.info(f"Using learning rate scheduler: {scheduler}")
    logger.info(f"Using scheduler params: {scheduler_params}")
def validate_species(
    model: ZambaVideoClassificationLightningModule, data_module: ZambaVideoDataModule
):
    """Raise ValueError if any dataloader's species list differs from the
    model's species, listing each conflicting split in the message."""
    dataloaders = {
        "Train": data_module.train_dataloader(),
        "Val": data_module.val_dataloader(),
        "Test": data_module.test_dataloader(),
    }
    conflicts = [
        f"""{name} dataset includes:\n{", ".join(dl.dataset.species)}\n"""
        for name, dl in dataloaders.items()
        if dl is not None and dl.dataset.species != model.species
    ]

    if conflicts:
        conflicts.append(f"""Model predicts:\n{", ".join(model.species)}""")
        conflict_msg = "\n\n".join(conflicts)
        raise ValueError(
            f"""Dataloader species and model species do not match.\n\n{conflict_msg}"""
        )
def train_model(
    train_config: TrainConfig,
    video_loader_config: Optional[VideoLoaderConfig] = None,
):
    """Trains a model.

    Args:
        train_config (TrainConfig): Pydantic config for training.
        video_loader_config (VideoLoaderConfig, optional): Pydantic config for preprocessing videos.
            If None, will use default for model specified in TrainConfig.

    Returns:
        pl.Trainer: The fitted trainer.
    """
    # get default VLC for model if not specified
    if video_loader_config is None:
        video_loader_config = ModelConfig(
            train_config=train_config, video_loader_config=video_loader_config
        ).video_loader_config

    # set up model
    model = instantiate_model(
        checkpoint=train_config.checkpoint,
        labels=train_config.labels,
        scheduler_config=train_config.scheduler_config,
        from_scratch=train_config.from_scratch,
        model_name=train_config.model_name,
        use_default_model_labels=train_config.use_default_model_labels,
    )

    data_module = ZambaVideoDataModule(
        video_loader_config=video_loader_config,
        transform=MODEL_MAPPING[model.__class__.__name__]["transform"],
        train_metadata=train_config.labels,
        batch_size=train_config.batch_size,
        num_workers=train_config.num_workers,
    )

    # fail fast if dataloader species don't line up with the model head
    validate_species(model, data_module)

    train_config.save_dir.mkdir(parents=True, exist_ok=True)

    # add folder version_n that auto increments if we are not overwriting
    tensorboard_version = train_config.save_dir.name if train_config.overwrite else None
    tensorboard_save_dir = (
        train_config.save_dir.parent if train_config.overwrite else train_config.save_dir
    )

    tensorboard_logger = TensorBoardLogger(
        save_dir=tensorboard_save_dir,
        name=None,
        version=tensorboard_version,
        default_hp_metric=False,
    )

    logging_and_save_dir = (
        tensorboard_logger.log_dir if not train_config.overwrite else train_config.save_dir
    )

    # monitor/mode only apply when early stopping is configured
    model_checkpoint = ModelCheckpoint(
        dirpath=logging_and_save_dir,
        filename=train_config.model_name,
        monitor=(
            train_config.early_stopping_config.monitor
            if train_config.early_stopping_config is not None
            else None
        ),
        mode=(
            train_config.early_stopping_config.mode
            if train_config.early_stopping_config is not None
            else "min"
        ),
    )

    callbacks = [model_checkpoint]

    if train_config.early_stopping_config is not None:
        callbacks.append(EarlyStopping(**train_config.early_stopping_config.dict()))

    if train_config.backbone_finetune_config is not None:
        callbacks.append(BackboneFinetuning(**train_config.backbone_finetune_config.dict()))

    accelerator, devices = configure_accelerator_and_devices_from_gpus(train_config.gpus)

    trainer = pl.Trainer(
        accelerator=accelerator,
        devices=devices,
        max_epochs=train_config.max_epochs,
        logger=tensorboard_logger,
        callbacks=callbacks,
        fast_dev_run=train_config.dry_run,
        strategy=(
            DDPStrategy(find_unused_parameters=False)
            if (data_module.multiprocessing_context is not None) and (train_config.gpus > 1)
            else "auto"
        ),
    )

    if video_loader_config.cache_dir is None:
        logger.info("No cache dir is specified. Videos will not be cached.")
    else:
        logger.info(f"Videos will be cached to {video_loader_config.cache_dir}.")

    if train_config.auto_lr_find:
        logger.info("Finding best learning rate.")
        tuner = Tuner(trainer)
        tuner.lr_find(model=model, datamodule=data_module)

    # record the git hash for reproducibility when running from a checkout
    try:
        git_hash = git.Repo(search_parent_directories=True).head.object.hexsha
    except git.exc.InvalidGitRepositoryError:
        git_hash = None

    configuration = {
        "git_hash": git_hash,
        "model_class": model.model_class,
        "species": model.species,
        "starting_learning_rate": model.lr,
        "train_config": json.loads(train_config.json(exclude={"labels"})),
        "training_start_time": datetime.utcnow().isoformat(),
        "video_loader_config": json.loads(video_loader_config.json()),
    }

    if not train_config.dry_run:
        config_path = Path(logging_and_save_dir) / "train_configuration.yaml"
        config_path.parent.mkdir(exist_ok=True, parents=True)
        logger.info(f"Writing out full configuration to {config_path}.")
        with config_path.open("w") as fp:
            yaml.dump(configuration, fp)

    logger.info("Starting training...")
    trainer.fit(model, data_module)

    if not train_config.dry_run:
        # evaluate the best checkpoint on holdout and validation splits
        if trainer.datamodule.test_dataloader() is not None:
            logger.info("Calculating metrics on holdout set.")
            test_metrics = trainer.test(
                dataloaders=trainer.datamodule.test_dataloader(), ckpt_path="best"
            )[0]
            with (Path(logging_and_save_dir) / "test_metrics.json").open("w") as fp:
                json.dump(test_metrics, fp, indent=2)

        if trainer.datamodule.val_dataloader() is not None:
            logger.info("Calculating metrics on validation set.")
            val_metrics = trainer.validate(
                dataloaders=trainer.datamodule.val_dataloader(), ckpt_path="best"
            )[0]
            with (Path(logging_and_save_dir) / "val_metrics.json").open("w") as fp:
                json.dump(val_metrics, fp, indent=2)

    return trainer
def predict_model(
    predict_config: PredictConfig,
    video_loader_config: VideoLoaderConfig = None,
):
    """Predicts from a model and writes out predictions to a csv.

    Args:
        predict_config (PredictConfig): Pydantic config for performing inference.
        video_loader_config (VideoLoaderConfig, optional): Pydantic config for preprocessing videos.
            If None, will use default for model specified in PredictConfig.

    Returns:
        pd.DataFrame: Predictions indexed by filepath. Values are probabilities,
            0/1 threshold indicators, or class names depending on predict_config.
    """
    # get default VLC for model if not specified
    if video_loader_config is None:
        video_loader_config = ModelConfig(
            predict_config=predict_config, video_loader_config=video_loader_config
        ).video_loader_config

    # set up model
    model = instantiate_model(
        checkpoint=predict_config.checkpoint,
    )

    data_module = ZambaVideoDataModule(
        video_loader_config=video_loader_config,
        transform=MODEL_MAPPING[model.__class__.__name__]["transform"],
        predict_metadata=predict_config.filepaths,
        batch_size=predict_config.batch_size,
        num_workers=predict_config.num_workers,
    )

    validate_species(model, data_module)

    if video_loader_config.cache_dir is None:
        logger.info("No cache dir is specified. Videos will not be cached.")
    else:
        logger.info(f"Videos will be cached to {video_loader_config.cache_dir}.")

    accelerator, devices = configure_accelerator_and_devices_from_gpus(predict_config.gpus)

    trainer = pl.Trainer(
        accelerator=accelerator,
        devices=devices,
        logger=False,
        fast_dev_run=predict_config.dry_run,
    )

    configuration = {
        "model_class": model.model_class,
        "species": model.species,
        "predict_config": json.loads(predict_config.json(exclude={"filepaths"})),
        "inference_start_time": datetime.utcnow().isoformat(),
        "video_loader_config": json.loads(video_loader_config.json()),
    }

    if predict_config.save is not False:
        config_path = predict_config.save_dir / "predict_configuration.yaml"
        logger.info(f"Writing out full configuration to {config_path}.")
        with config_path.open("w") as fp:
            yaml.dump(configuration, fp)

    dataloader = data_module.predict_dataloader()
    logger.info("Starting prediction...")
    probas = trainer.predict(model=model, dataloaders=dataloader)

    df = pd.DataFrame(
        np.vstack(probas), columns=model.species, index=dataloader.dataset.original_indices
    )

    # change output format if specified
    if predict_config.proba_threshold is not None:
        # 0/1 indicator per species at the given probability threshold
        df = (df > predict_config.proba_threshold).astype(int)

    elif predict_config.output_class_names:
        # single most likely class name per video
        df = df.idxmax(axis=1)

    else:  # round to a useful number of places
        df = df.round(5)

    if predict_config.save is not False:
        preds_path = predict_config.save_dir / "zamba_predictions.csv"
        logger.info(f"Saving out predictions to {preds_path}.")
        with preds_path.open("w") as fp:
            df.to_csv(fp, index=True)

    return df
class ModelManager(object):
    """Mediates loading, configuration, and logic of model calls.

    Args:
        config (ModelConfig): Instantiated ModelConfig.
    """

    def __init__(self, config: ModelConfig):
        self.config = config

    @classmethod
    def from_yaml(cls, config):
        """Alternate constructor accepting either a ModelConfig instance or
        a path to a yaml file parseable into one."""
        if isinstance(config, ModelConfig):
            return cls(config)
        return cls(ModelConfig.parse_file(config))

    def train(self):
        """Run training with this manager's train and video loader configs."""
        train_model(
            train_config=self.config.train_config,
            video_loader_config=self.config.video_loader_config,
        )

    def predict(self):
        """Run inference with this manager's predict and video loader configs."""
        predict_model(
            predict_config=self.config.predict_config,
            video_loader_config=self.config.video_loader_config,
        )
| python | MIT | ce5d28ead22557e412d870780b0dbe3884bbd5cc | 2026-01-05T07:12:27.321318Z | false |
drivendataorg/zamba | https://github.com/drivendataorg/zamba/blob/ce5d28ead22557e412d870780b0dbe3884bbd5cc/zamba/models/config.py | zamba/models/config.py | from enum import Enum
from pathlib import Path
import random
from typing import Dict, Optional, Union
import ffmpeg
from loguru import logger
import numpy as np
import pandas as pd
from pydantic import BaseModel
from pydantic import DirectoryPath, FilePath, validator, root_validator
from pqdm.threads import pqdm
import torch
from tqdm import tqdm
import yaml
from zamba import MODELS_DIRECTORY
from zamba.data.metadata import create_site_specific_splits
from zamba.data.video import VideoLoaderConfig
from zamba.exceptions import ZambaFfmpegException
from zamba.models.registry import available_models
from zamba.models.utils import (
download_weights,
get_checkpoint_hparams,
get_model_checkpoint_filename,
get_model_species,
RegionEnum,
)
from zamba.pytorch.transforms import zamba_image_model_transforms, slowfast_transforms
from zamba.settings import IMAGE_SUFFIXES, SPLIT_SEED, VIDEO_SUFFIXES, get_model_cache_dir
# Number of GPUs visible to torch on this machine; used by validate_gpus.
GPUS_AVAILABLE = torch.cuda.device_count()

# Private S3 training-run locations that produced each official model's weights.
WEIGHT_LOOKUP = {
    "time_distributed": "s3://drivendata-client-zamba/data/results/zamba_classification_retraining/td_full_set/version_1/",
    "european": "s3://drivendata-client-zamba/data/results/zamba_v2_classification/european_td_dev_base/version_0/",
    "slowfast": "s3://drivendata-client-zamba/data/results/zamba_v2_classification/experiments/slowfast_small_set_full_size_mdlite/version_2/",
    "blank_nonblank": "s3://drivendata-client-zamba/data/results/zamba_classification_retraining/td_full_set_bnb/version_0/",
}

# Per-architecture preprocessing transform and frames sampled per video,
# keyed by model class name.
MODEL_MAPPING = {
    "TimeDistributedEfficientNet": {
        "transform": zamba_image_model_transforms(),
        "n_frames": 16,
    },
    "SlowFast": {"transform": slowfast_transforms(), "n_frames": 32},
}
class ModelEnum(str, Enum):
    """Shorthand names of models supported by zamba.

    Values match the folder names under MODELS_DIRECTORY that hold each
    model's official config.yaml and hparams.yaml.
    """

    time_distributed = "time_distributed"
    slowfast = "slowfast"
    european = "european"
    blank_nonblank = "blank_nonblank"
class MonitorEnum(str, Enum):
    """Validation metric to monitor for early stopping. Training is stopped when no
    improvement is observed. Values are metric names logged during validation."""

    val_macro_f1 = "val_macro_f1"
    val_loss = "val_loss"
def validate_gpus(gpus: int):
    """Ensure the number of GPUs requested is equal to or less than the number of GPUs
    available on the machine."""
    if gpus <= GPUS_AVAILABLE:
        return gpus
    raise ValueError(f"Found only {GPUS_AVAILABLE} GPU(s). Cannot use {gpus}.")
def validate_model_cache_dir(model_cache_dir: Optional[Path]):
    """Set up cache directory for downloading model weight. Order of priority is:
    config argument, environment variable, or user's default cache dir.
    """
    chosen = model_cache_dir if model_cache_dir is not None else get_model_cache_dir()
    cache_dir = Path(chosen)
    cache_dir.mkdir(parents=True, exist_ok=True)
    return cache_dir
def check_files_exist_and_load(
    df: pd.DataFrame, data_dir: DirectoryPath, skip_load_validation: bool
):
    """Check whether files in file list exist and can be loaded with ffmpeg.
    Warn and skip files that don't exist or can't be loaded.

    Args:
        df (pd.DataFrame): DataFrame with a "filepath" column
        data_dir (Path): Data folder to prepend if filepath is not an
            absolute path.
        skip_load_validation (bool): Skip ffprobe check that verifies all videos
            can be loaded.

    Returns:
        pd.DataFrame: DataFrame with valid and loadable videos.
    """
    # update filepath column to prepend data_dir
    # NOTE(review): `.path` looks like a pandas-path style accessor that
    # broadcasts the `/` join over the series — confirm the extension is registered
    df["filepath"] = str(data_dir) / df.filepath.path

    # we can have multiple rows per file with labels so limit just to one row per file for these checks
    files_df = df[["filepath"]].drop_duplicates()

    # check for missing files
    logger.info(f"Checking all {len(files_df):,} filepaths exist. Trying fast file checking...")

    # try to check files in parallel
    paths = files_df["filepath"].apply(Path)
    exists = pqdm(paths, Path.exists, n_jobs=16)
    exists = np.array(exists)

    # if fast checking fails, fall back to slow checking
    # if an I/O error is in `exists`, the array has dtype `object`
    if exists.dtype != bool:
        logger.info(
            "Fast file checking failed. Running slower check, which can take 30 seconds per thousand files."
        )
        exists = files_df["filepath"].path.exists()

    # select the missing files
    invalid_files = files_df[~exists]

    # if no files exist
    if len(invalid_files) == len(files_df):
        raise ValueError(
            f"None of the video filepaths exist. Are you sure they're specified correctly? Here's an example invalid path: {invalid_files.filepath.values[0]}. Either specify absolute filepaths in the csv or provide filepaths relative to `data_dir`."
        )

    # if at least some files exist
    elif len(invalid_files) > 0:
        logger.debug(
            f"The following files could not be found: {'/n'.join(invalid_files.filepath.values.tolist())}"
        )
        logger.warning(
            f"Skipping {len(invalid_files)} file(s) that could not be found. For example, {invalid_files.filepath.values[0]}."
        )
        # remove invalid files to prep for ffprobe check on remaining
        files_df = files_df[~files_df.filepath.isin(invalid_files.filepath)]

    bad_load = []
    if not skip_load_validation:
        logger.info(
            "Checking that all videos can be loaded. If you're very confident all your videos can be loaded, you can skip this with `skip_load_validation`, but it's not recommended."
        )

        # ffprobe check
        for f in tqdm(files_df.filepath):
            try:
                ffmpeg.probe(str(f))
            except ffmpeg.Error as exc:
                logger.debug(ZambaFfmpegException(exc.stderr))
                bad_load.append(f)

        if len(bad_load) > 0:
            logger.warning(
                f"Skipping {len(bad_load)} file(s) that could not be loaded with ffmpeg."
            )

    # keep only rows whose file both exists and probes cleanly
    df = df[
        (~df.filepath.isin(bad_load)) & (~df.filepath.isin(invalid_files.filepath))
    ].reset_index(drop=True)

    return df
def validate_model_name_and_checkpoint(cls, values):
    """Ensures a checkpoint file or model name is provided. If a model name is provided,
    looks up the corresponding public checkpoint file from the official configs.
    Download the checkpoint if it does not yet exist.

    Args:
        cls: Pydantic model class (root_validator-style signature).
        values (dict): Field values; reads/writes "checkpoint" and "model_name".

    Returns:
        dict: The (possibly updated) values.

    Raises:
        ValueError: If neither model_name nor checkpoint is provided.
    """
    checkpoint = values.get("checkpoint")
    model_name = values.get("model_name")

    # must specify either checkpoint or model name
    if checkpoint is None and model_name is None:
        raise ValueError("Must provide either model_name or checkpoint path.")

    # checkpoint supercedes model
    elif checkpoint is not None and model_name is not None:
        logger.info(f"Using checkpoint file: {checkpoint}.")
        # get model name from checkpoint so it can be used for the video loader config
        hparams = get_checkpoint_hparams(checkpoint)
        try:
            values["model_name"] = available_models[hparams["model_class"]]._default_model_name
        except (AttributeError, KeyError):
            # BUG FIX: the fallback previously assigned to a dead local
            # variable instead of values["model_name"], so it had no effect
            values["model_name"] = f"{model_name}-{Path(checkpoint).stem}"

    elif checkpoint is None and model_name is not None:
        if not values.get("from_scratch"):
            # get public weights file from official models config
            values["checkpoint"] = get_model_checkpoint_filename(model_name)

            # if cached version exists, use that
            cached_path = Path(values["model_cache_dir"]) / values["checkpoint"]
            if cached_path.exists():
                values["checkpoint"] = cached_path

            # download if checkpoint doesn't exist
            if not values["checkpoint"].exists():
                logger.info(
                    f"Downloading weights for model '{model_name}' to {values['model_cache_dir']}."
                )
                values["checkpoint"] = download_weights(
                    filename=str(values["checkpoint"]),
                    weight_region=values["weight_download_region"],
                    destination_dir=values["model_cache_dir"],
                )

    return values
def get_filepaths(values, suffix_whitelist):
    """If no file list is passed, get all files in data directory. Warn if there
    are unsupported suffixes. Filepaths is set to a dataframe, where column `filepath`
    contains files with valid suffixes.
    """
    if values["filepaths"] is None:
        logger.info(f"Getting files in {values['data_dir']}.")

        media_files = []
        skipped_suffixes = []

        # walk the data directory, sorting files into supported vs. unsupported
        for path in Path(values["data_dir"]).rglob("*"):
            if not path.is_file():
                continue
            if path.suffix.lower() in suffix_whitelist:
                media_files.append(path.resolve())
            else:
                skipped_suffixes.append(path.suffix.lower())

        if skipped_suffixes:
            logger.warning(
                f"Ignoring {len(skipped_suffixes)} file(s) with suffixes {set(skipped_suffixes)}. To include, specify all suffixes with a VIDEO_SUFFIXES or IMAGE_SUFFIXES environment variable."
            )

        if not media_files:
            error_msg = f"No relevant files found in {values['data_dir']}."
            # give a targeted hint if the directory holds images instead of videos
            if set(skipped_suffixes) & set(IMAGE_SUFFIXES):
                error_msg += " Image files *were* found. Use a command starting with `zamba image` to work with images rather than videos."
            raise ValueError(error_msg)

        logger.info(f"Found {len(media_files):,} media files in {values['data_dir']}.")
        values["filepaths"] = pd.DataFrame(media_files, columns=["filepath"])

    return values
def get_video_filepaths(cls, values):
    """Resolve filepaths using the module-level video suffix whitelist."""
    return get_filepaths(values=values, suffix_whitelist=VIDEO_SUFFIXES)
class ZambaBaseModel(BaseModel):
    """Set defaults for all models that inherit from the pydantic base model."""

    class Config:
        # reject unexpected fields instead of silently ignoring them
        extra = "forbid"
        # store enum members by their value (e.g. the underlying string)
        use_enum_values = True
        # re-run validation when fields are assigned after instantiation
        validate_assignment = True
class BackboneFinetuneConfig(ZambaBaseModel):
    """Configuration containing parameters to be used for backbone finetuning.

    Args:
        unfreeze_backbone_at_epoch (int, optional): Epoch at which the backbone
            will be unfrozen. Defaults to 5.
        backbone_initial_ratio_lr (float, optional): Used to scale down the backbone
            learning rate compared to rest of model. Defaults to 0.01.
        multiplier (int or float, optional): Multiply the learning rate by a constant
            value at the end of each epoch. Defaults to 1.
        pre_train_bn (bool, optional): Train batch normalization layers prior to
            finetuning. False is recommended for slowfast models and True is recommended
            for time distributed models. Defaults to False.
        train_bn (bool, optional): Make batch normalization trainable. Defaults to False.
        verbose (bool, optional): Display current learning rate for model and backbone.
            Defaults to True.
    """

    unfreeze_backbone_at_epoch: Optional[int] = 5
    backbone_initial_ratio_lr: Optional[float] = 0.01
    multiplier: Optional[Union[int, float]] = 1
    pre_train_bn: Optional[bool] = False  # freeze batch norm layers prior to finetuning
    train_bn: Optional[bool] = False  # don't train bn layers in unfrozen finetuning layers
    verbose: Optional[bool] = True
class EarlyStoppingConfig(ZambaBaseModel):
    """Parameters controlling early stopping during training.

    Args:
        monitor (str): Metric to watch; either "val_macro_f1" or "val_loss".
            Defaults to "val_macro_f1".
        patience (int): How many epochs without improvement to tolerate before
            training is stopped. Defaults to 5.
        verbose (bool): Verbosity mode. Defaults to True.
        mode (str, optional): "max" or "min" — the direction of improvement for
            the monitored metric. When None, the correct mode is inferred from
            `monitor`. Defaults to None.
    """

    monitor: MonitorEnum = "val_macro_f1"
    patience: int = 5
    verbose: bool = True
    mode: Optional[str] = None

    @root_validator
    def validate_mode(cls, values):
        # each supported metric has exactly one sensible direction of improvement
        expected = {"val_macro_f1": "max", "val_loss": "min"}[values.get("monitor")]
        supplied = values.get("mode")

        if supplied is None:
            values["mode"] = expected
        elif supplied != expected:
            raise ValueError(
                f"Provided mode {supplied} is incorrect for {values.get('monitor')} monitor."
            )

        return values
class SchedulerConfig(ZambaBaseModel):
    """Parameters for a custom pytorch learning rate scheduler.
    See https://pytorch.org/docs/stable/optim.html for options.

    Args:
        scheduler (str): Name of learning rate scheduler to use. See
            https://pytorch.org/docs/stable/optim.html for options.
        scheduler_params (dict, optional): Keyword arguments passed to the
            learning rate scheduler upon initialization (eg. {"milestones": [1],
            "gamma": 0.5, "verbose": True}). Defaults to None.
    """

    scheduler: Optional[str]
    scheduler_params: Optional[dict] = None

    @validator("scheduler", always=True)
    def validate_scheduler(cls, scheduler):
        # None means "no scheduler"; anything else must name a class available
        # in torch.optim.lr_scheduler
        if scheduler is not None and scheduler not in torch.optim.lr_scheduler.__dict__.keys():
            raise ValueError(
                "Scheduler is not a `torch.optim.lr_scheduler`. "
                "See https://github.com/pytorch/pytorch/blob/master/torch/optim/lr_scheduler.py "
                "for options."
            )
        return scheduler
class TrainConfig(ZambaBaseModel):
    """
    Configuration for training a video model.

    Args:
        labels (FilePath or pandas DataFrame): Path to a CSV or pandas DataFrame
            containing labels for training, with one row per label. There must be
            columns called 'filepath' (absolute or relative to the data_dir) and
            'label', and optionally columns called 'split' ("train", "val", or "holdout")
            and 'site'. Labels must be specified to train a model.
        data_dir (DirectoryPath): Path to a directory containing training
            videos. Defaults to the current working directory.
        checkpoint (FilePath, optional): Path to a custom checkpoint file (.ckpt)
            generated by zamba that can be used to resume training. If None and from_scratch
            is False, defaults to a pretrained model. Defaults to None.
        scheduler_config (SchedulerConfig or str, optional): Config for setting up
            the learning rate scheduler on the model. If "default", uses scheduler
            that was used for training. If None, will not use a scheduler.
            Defaults to "default".
        model_name (str, optional): Name of the model to use for training. Options are:
            time_distributed, slowfast, european, blank_nonblank. Defaults to time_distributed.
        dry_run (bool or int, Optional): Run one training and validation batch
            for one epoch to detect any bugs prior to training the full model.
            Disables tuners, checkpoint callbacks, loggers, and logger callbacks.
            Defaults to False.
        batch_size (int): Batch size to use for training. Defaults to 2.
        auto_lr_find (bool): Use a learning rate finder algorithm when calling
            trainer.tune() to try to find an optimal initial learning rate. Defaults to
            False. The learning rate finder is not guaranteed to find a good learning
            rate; depending on the dataset, it can select a learning rate that leads to
            poor model training. Use with caution.
        backbone_finetune_config (BackboneFinetuneConfig, optional): Set parameters
            to finetune a backbone model to align with the current learning rate.
            Defaults to a BackboneFinetuneConfig(unfreeze_backbone_at_epoch=5,
            backbone_initial_ratio_lr=0.01, multiplier=1, pre_train_bn=False,
            train_bn=False, verbose=True).
        gpus (int): Number of GPUs to use during training. By default, all of
            the available GPUs found on the machine will be used. An error will be raised
            if the number of GPUs specified is more than the number that are available.
        num_workers (int): Number of subprocesses to use for data loading. 0 means
            that the data will be loaded in the main process. The maximum value is
            the number of CPUs in the system. Defaults to 3.
        max_epochs (int, optional): Stop training once this number of epochs is
            reached. Disabled by default (None), which means training continues
            until early stopping criteria are met.
        early_stopping_config (EarlyStoppingConfig, optional): Configuration for
            early stopping, which monitors a metric during training and stops training
            when the metric stops improving. Defaults to EarlyStoppingConfig(monitor='val_macro_f1',
            patience=5, verbose=True, mode='max').
        weight_download_region (str): s3 region to download pretrained weights from.
            Options are "us" (United States), "eu" (Europe), or "asia" (Asia Pacific).
            Defaults to "us".
        split_proportions (dict): Proportions used to divide data into training,
            validation, and holdout sets if a "split" column is not included in
            labels. Defaults to {"train": 3, "val": 1, "holdout": 1}.
        save_dir (Path, optional): Path to a directory where training files
            will be saved. Files include the best model checkpoint (``model_name``.ckpt),
            training configuration (configuration.yaml), Tensorboard logs
            (events.out.tfevents...), test metrics (test_metrics.json), validation
            metrics (val_metrics.json), and model hyperparameters (hparams.yml).
            If not specified, files are saved to a folder in the current working directory.
        overwrite (bool): If True, will save outputs in `save_dir` overwriting if those
            exist. If False, will create auto-incremented `version_n` folder in `save_dir`
            with model outputs. Defaults to False.
        skip_load_validation (bool): Skip ffprobe check, which verifies that all
            videos can be loaded and skips files that cannot be loaded. Defaults
            to False.
        from_scratch (bool): Instantiate the model with base weights. This means
            starting with ImageNet weights for image-based models (time_distributed,
            european, and blank_nonblank) and Kinetics weights for video-based models
            (slowfast). Defaults to False.
        use_default_model_labels (bool, optional): By default, output the full set of
            default model labels rather than just the species in the labels file. Only
            applies if the provided labels are a subset of the default model labels.
            If set to False, will replace the model head for finetuning and output only
            the species in the provided labels file.
        model_cache_dir (Path, optional): Cache directory where downloaded model weights
            will be saved. If None and the MODEL_CACHE_DIR environment variable is
            not set, uses your default cache directory. Defaults to None.
    """

    labels: Union[FilePath, pd.DataFrame]
    data_dir: DirectoryPath = ""
    checkpoint: Optional[FilePath] = None
    scheduler_config: Optional[Union[str, SchedulerConfig]] = "default"
    model_name: Optional[ModelEnum] = ModelEnum.time_distributed.value
    dry_run: Union[bool, int] = False
    batch_size: int = 2
    auto_lr_find: bool = False
    backbone_finetune_config: Optional[BackboneFinetuneConfig] = BackboneFinetuneConfig()
    gpus: int = GPUS_AVAILABLE
    num_workers: int = 3
    max_epochs: Optional[int] = None
    early_stopping_config: Optional[EarlyStoppingConfig] = EarlyStoppingConfig()
    weight_download_region: RegionEnum = "us"
    split_proportions: Optional[Dict[str, int]] = {"train": 3, "val": 1, "holdout": 1}
    save_dir: Path = Path.cwd()
    overwrite: bool = False
    skip_load_validation: bool = False
    from_scratch: bool = False
    use_default_model_labels: Optional[bool] = None
    model_cache_dir: Optional[Path] = None

    class Config:
        # labels may be a pandas DataFrame, which pydantic cannot validate natively
        arbitrary_types_allowed = True

    # reuse the module-level validators shared with other configs
    _validate_gpus = validator("gpus", allow_reuse=True, pre=True)(validate_gpus)

    _validate_model_cache_dir = validator("model_cache_dir", allow_reuse=True, always=True)(
        validate_model_cache_dir
    )

    @root_validator(skip_on_failure=True)
    def validate_from_scratch_and_checkpoint(cls, values):
        # from_scratch is incompatible with resuming from a checkpoint, and a
        # model_name is required to know which architecture to instantiate
        if values["from_scratch"]:
            if values["checkpoint"] is not None:
                raise ValueError("If from_scratch=True, you cannot specify a checkpoint.")

            if values["model_name"] is None:
                raise ValueError("If from_scratch=True, model_name cannot be None.")

        return values

    # resolve model_name/checkpoint (and download weights if needed) via the
    # shared module-level root validator
    _validate_model_name_and_checkpoint = root_validator(allow_reuse=True, skip_on_failure=True)(
        validate_model_name_and_checkpoint
    )

    @validator("scheduler_config", always=True)
    def validate_scheduler_config(cls, scheduler_config):
        # normalize: None becomes an explicit "no scheduler" config; the only
        # accepted string value is "default"
        if scheduler_config is None:
            return SchedulerConfig(scheduler=None)
        elif isinstance(scheduler_config, str) and scheduler_config != "default":
            raise ValueError("Scheduler can either be 'default', None, or a SchedulerConfig.")
        else:
            return scheduler_config

    @root_validator(skip_on_failure=True)
    def turn_off_load_validation_if_dry_run(cls, values):
        # dry runs are for smoke-testing, so skip the (slow) ffprobe check
        if values["dry_run"] and not values["skip_load_validation"]:
            logger.info("Turning off video loading check since dry_run=True.")
            values["skip_load_validation"] = True
        return values

    @root_validator(skip_on_failure=True)
    def validate_filepaths_and_labels(cls, values):
        """Validate the labels CSV/DataFrame: required columns, split column
        consistency, and non-null labels; then verify the referenced files.
        Replaces values['labels'] with the cleaned DataFrame.
        """
        logger.info("Validating labels csv.")
        labels = (
            pd.read_csv(values["labels"])
            if not isinstance(values["labels"], pd.DataFrame)
            else values["labels"]
        )

        if not set(["label", "filepath"]).issubset(labels.columns):
            raise ValueError(f"{values['labels']} must contain `filepath` and `label` columns.")

        # subset to required and optional
        cols_to_keep = [c for c in labels.columns if c in ["filepath", "label", "site", "split"]]
        labels = labels[cols_to_keep]

        # validate split column has no partial nulls or invalid values
        if "split" in labels.columns:

            # if split is entirely null, warn, drop column, and generate splits automatically
            if labels.split.isnull().all():
                logger.warning(
                    "Split column is entirely null. Will generate splits automatically using `split_proportions`."
                )
                labels = labels.drop("split", axis=1)

            # error if split column has null values
            elif labels.split.isnull().any():
                raise ValueError(
                    f"Found {labels.split.isnull().sum()} row(s) with null `split`. Fill in these rows with either `train`, `val`, or `holdout`. Alternatively, do not include a `split` column in your labels and we'll generate splits for you using `split_proportions`."
                )

            # otherwise check that split values are valid
            elif not set(labels.split).issubset({"train", "val", "holdout"}):
                raise ValueError(
                    f"Found the following invalid values for `split`: {set(labels.split).difference({'train', 'val', 'holdout'})}. `split` can only contain `train`, `val`, or `holdout.`"
                )

            elif values["split_proportions"] is not None:
                logger.warning(
                    "Labels contains split column yet split_proportions are also provided. Split column in labels takes precedence."
                )
                # set to None for clarity in final configuration.yaml
                values["split_proportions"] = None

        # error if labels are entirely null
        null_labels = labels.label.isnull()
        if sum(null_labels) == len(labels):
            raise ValueError("Species cannot be null for all videos.")

        # skip and warn about any videos without species label
        elif sum(null_labels) > 0:
            logger.warning(f"Found {sum(null_labels)} filepath(s) with no label. Will skip.")
            labels = labels[~null_labels]

        # check that all videos exist and can be loaded
        values["labels"] = check_files_exist_and_load(
            df=labels,
            data_dir=values["data_dir"],
            skip_load_validation=values["skip_load_validation"],
        )
        return values

    @root_validator(skip_on_failure=True)
    def validate_provided_species_and_use_default_model_labels(cls, values):
        """If the model species are the desired output, the labels file must contain
        a subset of the model species.
        """
        provided_species = set(values["labels"].label)
        model_species = set(
            get_model_species(checkpoint=values["checkpoint"], model_name=values["model_name"])
        )

        if not provided_species.issubset(model_species):
            # if labels are not a subset, user cannot set use_default_model_labels to True
            if values["use_default_model_labels"]:
                raise ValueError(
                    "Conflicting information between `use_default_model_labels=True` and the "
                    "species provided in labels file. "
                    "If you want your model to predict all the zamba species, make sure your "
                    "labels are a subset. The species in the labels file that are not "
                    f"in the model species are {provided_species - model_species}. "
                    "If you want your model to only predict the species in your labels file, "
                    "set `use_default_model_labels` to False."
                )

            else:
                values["use_default_model_labels"] = False

        # if labels are a subset, default to True if no value provided
        elif values["use_default_model_labels"] is None:
            values["use_default_model_labels"] = True

        return values

    @root_validator(skip_on_failure=True)
    def preprocess_labels(cls, values):
        """One hot encode, add splits, and check for binary case.

        Replaces values['labels'] with modified DataFrame.

        Args:
            values: dictionary containing 'labels' and other config info
        """
        logger.info("Preprocessing labels into one hot encoded labels with one row per video.")
        labels = values["labels"]

        # lowercase to facilitate subset checking
        labels["label"] = labels.label.str.lower()
        model_species = get_model_species(
            checkpoint=values["checkpoint"], model_name=values["model_name"]
        )
        # when using the default model labels, fix the category set to the full
        # model species so get_dummies emits a column per model species
        labels["label"] = pd.Categorical(
            labels.label, categories=model_species if values["use_default_model_labels"] else None
        )

        # one hot encode collapse to one row per video
        labels = (
            pd.get_dummies(labels.rename(columns={"label": "species"}), columns=["species"])
            .groupby("filepath")
            .max()
        )

        # if no "split" column, set up train, val, and holdout split
        if "split" not in labels.columns:
            make_split(labels, values)

        # if there are only two species columns and every video belongs to one of them,
        # keep only blank label if it exists to allow resuming of blank_nonblank model
        # otherwise drop the second species column so the problem is treated as a binary classification
        species_cols = labels.filter(regex="species_").columns
        sums = labels[species_cols].sum(axis=1)
        if len(species_cols) == 2 and (sums == 1).all():
            col_to_keep = "species_blank" if "species_blank" in species_cols else species_cols[0]
            col_to_drop = [c for c in species_cols if c != col_to_keep]
            logger.warning(
                f"Binary case detected so only one species column will be kept. Output will be the binary case of {col_to_keep}."
            )
            labels = labels.drop(columns=col_to_drop)

        # filepath becomes column instead of index
        values["labels"] = labels.reset_index()

        return values
def make_split(labels, values):
    """Add a split column to `labels`.

    Uses the 'site' column for a site-specific split when present; otherwise
    randomly allocates media files per species according to
    `values["split_proportions"]`. Also writes a splits.csv record to
    `values["save_dir"]`.

    Args:
        labels: DataFrame with one row per video (one-hot encoded species columns)
        values: dictionary with config info ('split_proportions', 'save_dir', ...)

    Raises:
        ValueError: If any species has fewer media files than the number of
            non-zero splits requested.
    """
    logger.info(
        f"Dividing media files into train, val, and holdout sets using the following split proportions: {values['split_proportions']}."
    )

    # use site info if we have it
    if "site" in labels.columns:
        logger.info("Using provided 'site' column to do a site-specific split")
        labels["split"] = create_site_specific_splits(
            labels["site"], proportions=values["split_proportions"]
        )
    else:
        # otherwise randomly allocate
        logger.info(
            "No 'site' column found so media files for each species will be randomly allocated across splits using provided split proportions."
        )

        expected_splits = [k for k, v in values["split_proportions"].items() if v > 0]
        random.seed(SPLIT_SEED)

        # check we have at least as many videos per species as we have splits
        # labels are OHE at this point
        num_videos_per_species = labels.filter(regex="species_").sum().to_dict()
        too_few = {
            k.split("species_", 1)[1]: v
            for k, v in num_videos_per_species.items()
            if 0 < v < len(expected_splits)
        }

        if len(too_few) > 0:
            raise ValueError(
                f"Not all species have enough media files to allocate into the following splits: {', '.join(expected_splits)}. A minimum of {len(expected_splits)} media files per label is required. Found the following counts: {too_few}. Either remove these labels or add more images/videos."
            )

        for c in labels.filter(regex="species_").columns:
            species_df = labels[labels[c] > 0]

            if len(species_df):
                # within each species, seed splits by putting one video in each set and then allocate videos based on split proportions
                labels.loc[species_df.index, "split"] = expected_splits + random.choices(
                    list(values["split_proportions"].keys()),
                    weights=list(values["split_proportions"].values()),
                    k=len(species_df) - len(expected_splits),
                )

        logger.info(f"{labels.split.value_counts()}")

    # write splits.csv
    filename = values["save_dir"] / "splits.csv"
    # BUG FIX: the log message previously contained the literal text "(unknown)"
    # instead of interpolating the destination path
    logger.info(f"Writing out split information to {filename}.")

    # create the directory to save if we need to
    values["save_dir"].mkdir(parents=True, exist_ok=True)
    labels.reset_index()[["filepath", "split"]].drop_duplicates().to_csv(filename, index=False)
class PredictConfig(ZambaBaseModel):
"""
Configuration for using a video model for inference.
Args:
data_dir (DirectoryPath): Path to a directory containing videos for
inference. Defaults to the current working directory.
filepaths (FilePath, optional): Path to a CSV containing videos for inference, with
one row per video in the data_dir. There must be a column called
'filepath' (absolute or relative to the data_dir). If None, uses
all files in data_dir. Defaults to None.
checkpoint (FilePath, optional): Path to a custom checkpoint file (.ckpt)
generated by zamba that can be used to generate predictions. If None,
defaults to a pretrained model. Defaults to None.
model_name (str, optional): Name of the model to use for inference. Options are:
time_distributed, slowfast, european, blank_nonblank. Defaults to time_distributed.
gpus (int): Number of GPUs to use for inference.
Defaults to all of the available GPUs found on the machine.
| python | MIT | ce5d28ead22557e412d870780b0dbe3884bbd5cc | 2026-01-05T07:12:27.321318Z | true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.