repo stringlengths 7 90 | file_url stringlengths 81 315 | file_path stringlengths 4 228 | content stringlengths 0 32.8k | language stringclasses 1
value | license stringclasses 7
values | commit_sha stringlengths 40 40 | retrieved_at stringdate 2026-01-04 14:38:15 2026-01-05 02:33:18 | truncated bool 2
classes |
|---|---|---|---|---|---|---|---|---|
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/ticket/tests/batch.py | trac/trac/ticket/tests/batch.py | from trac.perm import PermissionCache
from trac.test import Mock, EnvironmentStub
from trac.ticket import api, default_workflow, web_ui
from trac.ticket.batch import BatchModifyModule
from trac.ticket.model import Ticket
from trac.util.datefmt import utc
import unittest
class BatchModifyTestCase(unittest.TestCase):
    """Tests for `BatchModifyModule`: extraction of new ticket values
    from a request, add/remove/assign semantics for list-valued fields,
    and batched saving of changes to the selected tickets."""

    def setUp(self):
        self.env = EnvironmentStub(default_data=True,
            enable=[default_workflow.ConfigurableTicketWorkflow,
                    web_ui.TicketModule,
                    api.TicketSystem])
        self.req = Mock(href=self.env.href, authname='anonymous', tz=utc)
        self.req.session = {}
        self.req.perm = PermissionCache(self.env)

    def assertCommentAdded(self, ticket_id, comment):
        """Assert that `comment` was added to ticket `ticket_id`."""
        ticket = Ticket(self.env, int(ticket_id))
        changes = ticket.get_changelog()
        comment_change = [c for c in changes if c[2] == 'comment'][0]
        # A changelog entry is (time, author, field, oldvalue, newvalue,
        # permanent): the comment text is the new value at index 4.
        # Index 2 is the field name 'comment' itself, which only happened
        # to match because the tests use the literal comment 'comment'.
        self.assertEqual(comment_change[4], comment)

    def assertFieldChanged(self, ticket_id, field, new_value):
        """Assert that `field` of ticket `ticket_id` was changed to
        `new_value`."""
        ticket = Ticket(self.env, int(ticket_id))
        changes = ticket.get_changelog()
        field_change = [c for c in changes if c[2] == field][0]
        self.assertEqual(field_change[4], new_value)

    def _change_list_test_helper(self, original, new, new2, mode):
        batch = BatchModifyModule(self.env)
        return batch._change_list(original, new, new2, mode)

    def _add_list_test_helper(self, original, to_add):
        return self._change_list_test_helper(original, to_add, '', '+')

    def _remove_list_test_helper(self, original, to_remove):
        return self._change_list_test_helper(original, to_remove, '', '-')

    def _add_remove_list_test_helper(self, original, to_add, to_remove):
        return self._change_list_test_helper(original, to_add, to_remove,
                                             '+-')

    def _assign_list_test_helper(self, original, new):
        return self._change_list_test_helper(original, new, '', '=')

    def _insert_ticket(self, summary, **kw):
        """Helper for inserting a ticket into the database."""
        ticket = Ticket(self.env)
        # Actually use the `summary` argument; it was previously ignored.
        ticket['summary'] = summary
        for k, v in kw.items():
            ticket[k] = v
        return ticket.insert()

    def test_ignore_summary_reporter_and_description(self):
        """These cannot be added through the UI, but if somebody tries
        to build their own POST data they will be ignored."""
        batch = BatchModifyModule(self.env)
        self.req.args = {}
        self.req.args['batchmod_value_summary'] = 'test ticket'
        self.req.args['batchmod_value_reporter'] = 'anonymous'
        self.req.args['batchmod_value_description'] = 'synergize the widgets'
        values = batch._get_new_ticket_values(self.req)
        self.assertEqual(len(values), 0)

    def test_add_batchmod_value_data_from_request(self):
        batch = BatchModifyModule(self.env)
        self.req.args = {}
        self.req.args['batchmod_value_milestone'] = 'milestone1'
        values = batch._get_new_ticket_values(self.req)
        self.assertEqual(values['milestone'], 'milestone1')

    def test_selected_tickets(self):
        self.req.args = {'selected_tickets': '1,2,3'}
        batch = BatchModifyModule(self.env)
        selected_tickets = batch._get_selected_tickets(self.req)
        self.assertEqual(selected_tickets, ['1', '2', '3'])

    def test_no_selected_tickets(self):
        """If nothing is selected, the return value is the empty list."""
        self.req.args = {'selected_tickets': ''}
        batch = BatchModifyModule(self.env)
        selected_tickets = batch._get_selected_tickets(self.req)
        self.assertEqual(selected_tickets, [])

    # Assign list items

    def test_change_list_replace_empty_with_single(self):
        """Replace empty field with single item."""
        changed = self._assign_list_test_helper('', 'alice')
        self.assertEqual(changed, 'alice')

    def test_change_list_replace_empty_with_items(self):
        """Replace empty field with items."""
        changed = self._assign_list_test_helper('', 'alice, bob')
        self.assertEqual(changed, 'alice, bob')

    def test_change_list_replace_item(self):
        """Replace item with a different item."""
        changed = self._assign_list_test_helper('alice', 'bob')
        self.assertEqual(changed, 'bob')

    def test_change_list_replace_item_with_items(self):
        """Replace item with different items."""
        changed = self._assign_list_test_helper('alice', 'bob, carol')
        self.assertEqual(changed, 'bob, carol')

    def test_change_list_replace_items_with_item(self):
        """Replace items with a different item."""
        changed = self._assign_list_test_helper('alice, bob', 'carol')
        self.assertEqual(changed, 'carol')

    def test_change_list_replace_items(self):
        """Replace items with different items."""
        changed = self._assign_list_test_helper('alice, bob', 'carol, dave')
        self.assertEqual(changed, 'carol, dave')

    def test_change_list_replace_items_partial(self):
        """Replace items with different (or not) items."""
        changed = self._assign_list_test_helper('alice, bob', 'bob, dave')
        self.assertEqual(changed, 'bob, dave')

    def test_change_list_clear(self):
        """Clear field."""
        changed = self._assign_list_test_helper('alice bob', '')
        self.assertEqual(changed, '')

    # Add / remove list items

    def test_change_list_add_item(self):
        """Append additional item."""
        changed = self._add_list_test_helper('alice', 'bob')
        self.assertEqual(changed, 'alice, bob')

    def test_change_list_add_items(self):
        """Append additional items."""
        changed = self._add_list_test_helper('alice, bob', 'carol, dave')
        self.assertEqual(changed, 'alice, bob, carol, dave')

    def test_change_list_remove_item(self):
        """Remove existing item."""
        changed = self._remove_list_test_helper('alice, bob', 'bob')
        self.assertEqual(changed, 'alice')

    def test_change_list_remove_items(self):
        """Remove existing items."""
        changed = self._remove_list_test_helper('alice, bob, carol',
                                                'alice, carol')
        self.assertEqual(changed, 'bob')

    def test_change_list_remove_idempotent(self):
        """Ignore missing item to be removed."""
        changed = self._remove_list_test_helper('alice', 'bob')
        self.assertEqual(changed, 'alice')

    def test_change_list_remove_mixed(self):
        """Ignore only missing item to be removed."""
        changed = self._remove_list_test_helper('alice, bob', 'bob, carol')
        self.assertEqual(changed, 'alice')

    def test_change_list_add_remove(self):
        """Remove existing item and append additional item."""
        changed = self._add_remove_list_test_helper('alice, bob', 'carol',
                                                    'alice')
        self.assertEqual(changed, 'bob, carol')

    def test_change_list_add_no_duplicates(self):
        """Existing items are not duplicated."""
        changed = self._add_list_test_helper('alice, bob', 'bob, carol')
        self.assertEqual(changed, 'alice, bob, carol')

    def test_change_list_remove_all_duplicates(self):
        """Remove all duplicates."""
        changed = self._remove_list_test_helper('alice, bob, alice', 'alice')
        self.assertEqual(changed, 'bob')

    # Save

    def test_save_comment(self):
        """Comments are saved to all selected tickets."""
        first_ticket_id = self._insert_ticket('Test 1', reporter='joe')
        second_ticket_id = self._insert_ticket('Test 2', reporter='joe')
        selected_tickets = [first_ticket_id, second_ticket_id]
        batch = BatchModifyModule(self.env)
        batch._save_ticket_changes(self.req, selected_tickets, {}, 'comment',
                                   'leave')
        self.assertCommentAdded(first_ticket_id, 'comment')
        self.assertCommentAdded(second_ticket_id, 'comment')

    def test_save_values(self):
        """Changed values are saved to all tickets."""
        first_ticket_id = self._insert_ticket('Test 1', reporter='joe',
                                              component='foo')
        second_ticket_id = self._insert_ticket('Test 2', reporter='joe')
        selected_tickets = [first_ticket_id, second_ticket_id]
        new_values = {'component': 'bar'}
        batch = BatchModifyModule(self.env)
        batch._save_ticket_changes(self.req, selected_tickets, new_values, '',
                                   'leave')
        self.assertFieldChanged(first_ticket_id, 'component', 'bar')
        self.assertFieldChanged(second_ticket_id, 'component', 'bar')

    def test_action_with_state_change(self):
        """Actions can have change status."""
        self.env.config.set('ticket-workflow', 'embiggen', '* -> big')
        first_ticket_id = self._insert_ticket('Test 1', reporter='joe',
                                              status='small')
        second_ticket_id = self._insert_ticket('Test 2', reporter='joe')
        selected_tickets = [first_ticket_id, second_ticket_id]
        batch = BatchModifyModule(self.env)
        batch._save_ticket_changes(self.req, selected_tickets, {}, '',
                                   'embiggen')
        # (unused `ticket`/`changes` locals removed; the assertions below
        # re-read the changelog themselves.)
        self.assertFieldChanged(first_ticket_id, 'status', 'big')
        self.assertFieldChanged(second_ticket_id, 'status', 'big')

    def test_action_with_side_effects(self):
        """Actions can have operations with side effects."""
        self.env.config.set('ticket-workflow', 'buckify', '* -> *')
        self.env.config.set('ticket-workflow', 'buckify.operations',
                            'set_owner')
        self.req.args = {}
        self.req.args['action_buckify_reassign_owner'] = 'buck'
        first_ticket_id = self._insert_ticket('Test 1', reporter='joe',
                                              owner='foo')
        second_ticket_id = self._insert_ticket('Test 2', reporter='joe')
        selected_tickets = [first_ticket_id, second_ticket_id]
        batch = BatchModifyModule(self.env)
        batch._save_ticket_changes(self.req, selected_tickets, {}, '',
                                   'buckify')
        self.assertFieldChanged(first_ticket_id, 'owner', 'buck')
        self.assertFieldChanged(second_ticket_id, 'owner', 'buck')
def suite():
    """Return the test suite for this module."""
    tests = unittest.TestSuite()
    tests.addTest(unittest.makeSuite(BatchModifyTestCase, 'test'))
    return tests


if __name__ == '__main__':
    unittest.main(defaultTest='suite')
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/ticket/tests/roadmap.py | trac/trac/ticket/tests/roadmap.py | from trac.test import EnvironmentStub
from trac.ticket.roadmap import *
from trac.core import ComponentManager
import unittest
class TicketGroupStatsTestCase(unittest.TestCase):
    """Tests for `TicketGroupStats` interval accounting and percent
    calculations."""

    def setUp(self):
        self.stats = TicketGroupStats('title', 'units')

    def test_init(self):
        # assertEquals is a deprecated alias; use assertEqual throughout.
        self.assertEqual('title', self.stats.title, 'title incorrect')
        self.assertEqual('units', self.stats.unit, 'unit incorrect')
        self.assertEqual(0, self.stats.count, 'count not zero')
        self.assertEqual(0, len(self.stats.intervals), 'intervals not empty')

    def test_add_interval(self):
        self.stats.add_interval('intTitle', 3, {'k1': 'v1'}, 'css', 0)
        self.stats.refresh_calcs()
        self.assertEqual(3, self.stats.count, 'count not incremented')
        # Named `interval` instead of shadowing the builtin `int`.
        interval = self.stats.intervals[0]
        self.assertEqual('intTitle', interval['title'], 'title incorrect')
        self.assertEqual(3, interval['count'], 'count incorrect')
        self.assertEqual({'k1': 'v1'}, interval['qry_args'],
                         'query args incorrect')
        self.assertEqual('css', interval['css_class'],
                         'css class incorrect')
        self.assertEqual(100, interval['percent'], 'percent incorrect')
        self.stats.add_interval('intTitle', 3, {'k1': 'v1'}, 'css', 0)
        self.stats.refresh_calcs()
        self.assertEqual(50, interval['percent'], 'percent not being updated')

    def test_add_interval_no_prog(self):
        self.stats.add_interval('intTitle', 3, {'k1': 'v1'}, 'css', 0)
        self.stats.add_interval('intTitle', 5, {'k1': 'v1'}, 'css', 0)
        self.stats.refresh_calcs()
        # (unused `interval` local removed)
        self.assertEqual(0, self.stats.done_count, 'count added for no prog')
        self.assertEqual(0, self.stats.done_percent, 'percent incremented')

    def test_add_interval_prog(self):
        self.stats.add_interval('intTitle', 3, {'k1': 'v1'}, 'css', 0)
        self.stats.add_interval('intTitle', 1, {'k1': 'v1'}, 'css', 1)
        self.stats.refresh_calcs()
        self.assertEqual(4, self.stats.count, 'count not incremented')
        self.assertEqual(1, self.stats.done_count, 'count not added to prog')
        self.assertEqual(25, self.stats.done_percent,
                         'done percent not incr')

    def test_add_interval_fudging(self):
        self.stats.add_interval('intTitle', 3, {'k1': 'v1'}, 'css', 0)
        self.stats.add_interval('intTitle', 5, {'k1': 'v1'}, 'css', 1)
        self.stats.refresh_calcs()
        self.assertEqual(8, self.stats.count, 'count not incremented')
        self.assertEqual(5, self.stats.done_count, 'count not added to prog')
        self.assertEqual(62, self.stats.done_percent,
                         'done percnt not fudged downward')
        self.assertEqual(62, self.stats.intervals[1]['percent'],
                         'interval percent not fudged downward')
        self.assertEqual(38, self.stats.intervals[0]['percent'],
                         'interval percent not fudged upward')
class DefaultTicketGroupStatsProviderTestCase(unittest.TestCase):
    """Tests for `DefaultTicketGroupStatsProvider` over a fixture of
    one closed and two active tickets in milestone 'Test'."""

    def setUp(self):
        self.env = EnvironmentStub(default_data=True)
        self.milestone1 = Milestone(self.env)
        self.milestone1.name = 'Test'
        self.milestone1.insert()
        self.milestone2 = Milestone(self.env)
        self.milestone2.name = 'Test2'
        self.milestone2.insert()
        tkt1 = Ticket(self.env)
        tkt1.populate({'summary': 'Foo', 'milestone': 'Test',
                       'owner': 'foman', 'status': 'new'})
        tkt1.insert()
        tkt2 = Ticket(self.env)
        tkt2.populate({'summary': 'Bar', 'milestone': 'Test',
                       'status': 'closed', 'owner': 'barman'})
        tkt2.insert()
        tkt3 = Ticket(self.env)
        tkt3.populate({'summary': 'Sum', 'milestone': 'Test',
                       'owner': 'suman', 'status': 'reopened'})
        tkt3.insert()
        self.tkt1 = tkt1
        self.tkt2 = tkt2
        self.tkt3 = tkt3
        prov = DefaultTicketGroupStatsProvider(ComponentManager())
        prov.env = self.env
        prov.config = self.env.config
        self.stats = prov.get_ticket_group_stats([tkt1.id, tkt2.id, tkt3.id])

    def tearDown(self):
        self.env.reset_db()

    def test_stats(self):
        # assertEquals is a deprecated alias; use assertEqual throughout.
        self.assertEqual(self.stats.title, 'ticket status',
                         'title incorrect')
        self.assertEqual(self.stats.unit, 'tickets', 'unit incorrect')
        self.assertEqual(2, len(self.stats.intervals),
                         'more than 2 intervals')

    def test_closed_interval(self):
        closed = self.stats.intervals[0]
        self.assertEqual('closed', closed['title'], 'closed title incorrect')
        self.assertEqual('closed', closed['css_class'],
                         'closed class incorrect')
        self.assertEqual(True, closed['overall_completion'],
                         'closed should contribute to overall completion')
        self.assertEqual({'status': ['closed'], 'group': ['resolution']},
                         closed['qry_args'], 'qry_args incorrect')
        self.assertEqual(1, closed['count'], 'closed count incorrect')
        self.assertEqual(33, closed['percent'], 'closed percent incorrect')

    def test_open_interval(self):
        # Named `active` instead of shadowing the builtin `open`.
        active = self.stats.intervals[1]
        self.assertEqual('active', active['title'], 'open title incorrect')
        self.assertEqual('open', active['css_class'], 'open class incorrect')
        self.assertEqual(False, active['overall_completion'],
                         "open shouldn't contribute to overall completion")
        self.assertEqual({'status':
                          [u'assigned', u'new', u'accepted', u'reopened']},
                         active['qry_args'], 'qry_args incorrect')
        self.assertEqual(2, active['count'], 'open count incorrect')
        self.assertEqual(67, active['percent'], 'open percent incorrect')
def in_tlist(ticket, tickets):
    """Return True if `tickets` (a list of ticket dicts) contains an
    entry whose 'id' equals `ticket.id`.

    The second parameter was renamed from `list`, which shadowed the
    builtin; `any()` short-circuits instead of materializing a list.
    """
    return any(t['id'] == ticket.id for t in tickets)
def suite():
    """Return the test suite for this module."""
    tests = unittest.TestSuite()
    tests.addTest(unittest.makeSuite(TicketGroupStatsTestCase, 'test'))
    tests.addTest(unittest.makeSuite(DefaultTicketGroupStatsProviderTestCase,
                                     'test'))
    return tests


if __name__ == '__main__':
    unittest.main(defaultTest='suite')
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/tests/wikisyntax.py | trac/trac/tests/wikisyntax.py | import os
import shutil
import tempfile
import unittest
from trac.attachment import Attachment
from trac.mimeview.api import RenderingContext
from trac.resource import Resource
from trac.search.web_ui import SearchModule
from trac.test import MockPerm
from trac.web.href import Href
from trac.wiki.tests import formatter
# Formatter test cases for the search: link resolver.  Each case is a
# `==...== title` header, input wiki text, a `---` separator, then the
# expected rendered HTML.  Do not re-indent: the string content is data.
SEARCH_TEST_CASES = u"""
============================== search: link resolver
search:foo
search:"foo bar"
[search:bar Bar]
[search:bar]
[search:]
------------------------------
<p>
<a class="search" href="/search?q=foo">search:foo</a>
<a class="search" href="/search?q=foo+bar">search:"foo bar"</a>
<a class="search" href="/search?q=bar">Bar</a>
<a class="search" href="/search?q=bar">bar</a>
<a class="search" href="/search">search</a>
</p>
------------------------------
============================== search: link resolver with query arguments
search:foo?wiki=on
search:?q=foo&wiki=on
search:"foo bar?wiki=on"
search:"?q=foo bar&wiki=on"
[search:bar?ticket=on Bar in Tickets]
[search:?q=bar&ticket=on Bar in Tickets]
------------------------------
<p>
<a class="search" href="/search?q=foo&wiki=on">search:foo?wiki=on</a>
<a class="search" href="/search?q=foo&wiki=on">search:?q=foo&wiki=on</a>
<a class="search" href="/search?q=foo+bar&wiki=on">search:"foo bar?wiki=on"</a>
<a class="search" href="/search?q=foo+bar&wiki=on">search:"?q=foo bar&wiki=on"</a>
<a class="search" href="/search?q=bar&ticket=on">Bar in Tickets</a>
<a class="search" href="/search?q=bar&ticket=on">Bar in Tickets</a>
</p>
------------------------------
"""
# Formatter test cases for attachment: and raw-attachment: links,
# covering the deprecated realm-first form, "foreign", "local",
# "missing" and "raw" links.  Do not re-indent: the content is data.
ATTACHMENT_TEST_CASES = u"""
============================== attachment: link resolver (deprecated)
attachment:wiki:WikiStart:file.txt (deprecated)
attachment:ticket:123:file.txt (deprecated)
[attachment:wiki:WikiStart:file.txt file.txt] (deprecated)
[attachment:ticket:123:file.txt] (deprecated)
------------------------------
<p>
<a class="attachment" href="/attachment/wiki/WikiStart/file.txt" title="Attachment 'file.txt' in WikiStart">attachment:wiki:WikiStart:file.txt</a><a class="trac-rawlink" href="/raw-attachment/wiki/WikiStart/file.txt" title="Download"></a> (deprecated)
<a class="attachment" href="/attachment/ticket/123/file.txt" title="Attachment 'file.txt' in Ticket #123">attachment:ticket:123:file.txt</a><a class="trac-rawlink" href="/raw-attachment/ticket/123/file.txt" title="Download"></a> (deprecated)
<a class="attachment" href="/attachment/wiki/WikiStart/file.txt" title="Attachment 'file.txt' in WikiStart">file.txt</a><a class="trac-rawlink" href="/raw-attachment/wiki/WikiStart/file.txt" title="Download"></a> (deprecated)
<a class="attachment" href="/attachment/ticket/123/file.txt" title="Attachment 'file.txt' in Ticket #123">ticket:123:file.txt</a><a class="trac-rawlink" href="/raw-attachment/ticket/123/file.txt" title="Download"></a> (deprecated)
</p>
------------------------------
============================== attachment: "foreign" links
attachment:file.txt:wiki:WikiStart
attachment:file.txt:ticket:123
[attachment:file.txt:wiki:WikiStart file.txt]
[attachment:file.txt:ticket:123]
attachment:foo.txt:wiki:SomePage/SubPage
------------------------------
<p>
<a class="attachment" href="/attachment/wiki/WikiStart/file.txt" title="Attachment 'file.txt' in WikiStart">attachment:file.txt:wiki:WikiStart</a><a class="trac-rawlink" href="/raw-attachment/wiki/WikiStart/file.txt" title="Download"></a>
<a class="attachment" href="/attachment/ticket/123/file.txt" title="Attachment 'file.txt' in Ticket #123">attachment:file.txt:ticket:123</a><a class="trac-rawlink" href="/raw-attachment/ticket/123/file.txt" title="Download"></a>
<a class="attachment" href="/attachment/wiki/WikiStart/file.txt" title="Attachment 'file.txt' in WikiStart">file.txt</a><a class="trac-rawlink" href="/raw-attachment/wiki/WikiStart/file.txt" title="Download"></a>
<a class="attachment" href="/attachment/ticket/123/file.txt" title="Attachment 'file.txt' in Ticket #123">file.txt:ticket:123</a><a class="trac-rawlink" href="/raw-attachment/ticket/123/file.txt" title="Download"></a>
<a class="attachment" href="/attachment/wiki/SomePage/SubPage/foo.txt" title="Attachment 'foo.txt' in SomePage/SubPage">attachment:foo.txt:wiki:SomePage/SubPage</a><a class="trac-rawlink" href="/raw-attachment/wiki/SomePage/SubPage/foo.txt" title="Download"></a>
</p>
------------------------------
============================== attachment: "local" links
attachment:file.txt
[attachment:file.txt that file]
------------------------------
<p>
<a class="attachment" href="/attachment/wiki/WikiStart/file.txt" title="Attachment 'file.txt' in WikiStart">attachment:file.txt</a><a class="trac-rawlink" href="/raw-attachment/wiki/WikiStart/file.txt" title="Download"></a>
<a class="attachment" href="/attachment/wiki/WikiStart/file.txt" title="Attachment 'file.txt' in WikiStart">that file</a><a class="trac-rawlink" href="/raw-attachment/wiki/WikiStart/file.txt" title="Download"></a>
</p>
------------------------------
============================== attachment: "missing" links
attachment:foo.txt
[attachment:foo.txt other file]
------------------------------
<p>
<a class="missing attachment">attachment:foo.txt</a>
<a class="missing attachment">other file</a>
</p>
------------------------------
============================== attachment: "raw" links
raw-attachment:file.txt
[raw-attachment:file.txt that file]
------------------------------
<p>
<a class="attachment" href="/raw-attachment/wiki/WikiStart/file.txt" title="Attachment 'file.txt' in WikiStart">raw-attachment:file.txt</a>
<a class="attachment" href="/raw-attachment/wiki/WikiStart/file.txt" title="Attachment 'file.txt' in WikiStart">that file</a>
</p>
------------------------------
============================== attachment: raw format as explicit argument
attachment:file.txt?format=raw
[attachment:file.txt?format=raw that file]
------------------------------
<p>
<a class="attachment" href="/attachment/wiki/WikiStart/file.txt?format=raw" title="Attachment 'file.txt' in WikiStart">attachment:file.txt?format=raw</a><a class="trac-rawlink" href="/raw-attachment/wiki/WikiStart/file.txt?format=raw" title="Download"></a>
<a class="attachment" href="/attachment/wiki/WikiStart/file.txt?format=raw" title="Attachment 'file.txt' in WikiStart">that file</a><a class="trac-rawlink" href="/raw-attachment/wiki/WikiStart/file.txt?format=raw" title="Download"></a>
</p>
------------------------------
""" # "
def attachment_setup(tc):
    """Create a temporary environment directory and insert the
    attachments the ATTACHMENT_TEST_CASES link to."""
    # Imported for their side effects only (presumably component
    # registration for the ticket/wiki realms) -- TODO confirm.
    import trac.ticket.api
    import trac.wiki.api
    # mkdtemp instead of os.mkdir on a fixed name: a leftover
    # 'trac-tempenv' directory from a crashed run would make
    # os.mkdir raise and every test case error out.
    tc.env.path = tempfile.mkdtemp(prefix='trac-tempenv-')
    attachment = Attachment(tc.env, 'wiki', 'WikiStart')
    attachment.insert('file.txt', tempfile.TemporaryFile(), 0)
    attachment = Attachment(tc.env, 'ticket', 123)
    attachment.insert('file.txt', tempfile.TemporaryFile(), 0)
    attachment = Attachment(tc.env, 'wiki', 'SomePage/SubPage')
    attachment.insert('foo.txt', tempfile.TemporaryFile(), 0)
def attachment_teardown(tc):
    """Remove the temporary environment directory and reset the
    test database."""
    shutil.rmtree(tc.env.path)
    tc.env.reset_db()
# Without EMAIL_VIEW permission, both plain addresses and mailto:
# links are obfuscated (local part kept, domain replaced by an
# ellipsis).  Do not re-indent: the string content is data.
EMAIL_TEST_CASE_DEFAULT = u"""
============================== mailto: obfuscated by default, like plain email
user@example.org vs. mailto:user@example.org
and [mailto:user@example.org Joe User]
------------------------------
<p>
user@\u2026 vs. mailto:user@\u2026
and Joe User
</p>
------------------------------
"""
def email_default_context():
    """Build a rendering context for WikiStart whose permission cache
    denies EMAIL_VIEW (and grants everything else), so that email
    obfuscation kicks in."""
    class NoEmailViewPerm(MockPerm):
        def has_permission(self, action, realm_or_resource=None, id=False,
                           version=False):
            return action != 'EMAIL_VIEW'
        __contains__ = has_permission

    ctx = RenderingContext(Resource('wiki', 'WikiStart'), href=Href('/'),
                           perm=NoEmailViewPerm())
    ctx.req = None  # 1.0 FIXME .req shouldn't be required by formatter
    return ctx
# With never_obfuscate_mailto enabled, explicit mailto: links are
# rendered as real links even though plain addresses stay obfuscated.
# Do not re-indent: the string content is data.
EMAIL_TEST_CASE_NEVER_OBFUSCATE = u"""
============================== mailto: not obfuscated, unlike plain email
user@example.org vs. mailto:user@example.org
and [mailto:user@example.org Joe User]
------------------------------
<p>
user@\u2026 vs. <a class="mail-link" href="mailto:user@example.org"><span class="icon"></span>mailto:user@example.org</a>
and <a class="mail-link" href="mailto:user@example.org"><span class="icon"></span>Joe User</a>
</p>
------------------------------
"""
def email_never_obfuscate_setup(tc):
    """Enable the never_obfuscate_mailto option in the test env."""
    tc.env.config.set('trac', 'never_obfuscate_mailto', True)
def suite():
    """Return the wiki-syntax formatter test suite for this module."""
    tests = unittest.TestSuite()
    tests.addTest(formatter.suite(SEARCH_TEST_CASES, file=__file__))
    tests.addTest(formatter.suite(ATTACHMENT_TEST_CASES, file=__file__,
                                  context=('wiki', 'WikiStart'),
                                  setup=attachment_setup,
                                  teardown=attachment_teardown))
    tests.addTest(formatter.suite(EMAIL_TEST_CASE_DEFAULT, file=__file__,
                                  context=email_default_context()))
    tests.addTest(formatter.suite(EMAIL_TEST_CASE_NEVER_OBFUSCATE,
                                  file=__file__,
                                  context=email_default_context(),
                                  setup=email_never_obfuscate_setup))
    return tests


if __name__ == '__main__':
    unittest.main(defaultTest='suite')
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/tests/attachment.py | trac/trac/tests/attachment.py | # -*- coding: utf-8 -*-
import os.path
import shutil
from StringIO import StringIO
import tempfile
import unittest
from trac.attachment import Attachment, AttachmentModule
from trac.core import Component, implements, TracError
from trac.perm import IPermissionPolicy, PermissionCache
from trac.resource import Resource, resource_exists
from trac.test import EnvironmentStub
from trac.tests.resource import TestResourceChangeListener
# Expected SHA-1 digests of parent-resource ids and attachment
# filenames, used below to check the hashed on-disk attachment paths.
hashes = {
'42': '92cfceb39d57d914ed8b14d0e37643de0797ae56',
'Foo.Mp3': '95797b6eb253337ff2c54e0881e2b747ec394f51',
'SomePage': 'd7e80bae461ca8568e794792f5520b603f540e06',
'Teh bar.jpg': 'ed9102c4aa099e92baf1073f824d21c5e4be5944',
'Teh foo.txt': 'ab97ba98d98fcf72b92e33a66b07077010171f70',
'bar.7z': '6c9600ad4d59ac864e6f0d2030c1fc76b4b406cb',
'bar.jpg': 'ae0faa593abf2b6f8871f6f32fe5b28d1c6572be',
'foo.$$$': 'eefc6aa745dbe129e8067a4a57637883edd83a8a',
'foo.2.txt': 'a8fcfcc2ef4e400ee09ae53c1aabd7f5a5fda0c7',
'foo.txt': '9206ac42b532ef8e983470c251f4e1a365fd636c',
u'bar.aäc': '70d0e3b813fdc756602d82748719a3ceb85cbf29',
u'ÜberSicht': 'a16c6837f6d3d2cc3addd68976db1c55deb694c8',
}
class TicketOnlyViewsTicket(Component):
    """Permission policy that decides TICKET_* actions based on the
    resource realm and abstains (None) from every other decision."""
    implements(IPermissionPolicy)

    def check_permission(self, action, username, resource, perm):
        if not action.startswith('TICKET_'):
            return None
        return resource.realm == 'ticket'
class AttachmentTestCase(unittest.TestCase):
def setUp(self):
self.env = EnvironmentStub()
self.env.path = os.path.join(tempfile.gettempdir(), 'trac-tempenv')
os.mkdir(self.env.path)
self.attachments_dir = os.path.join(self.env.path, 'files',
'attachments')
self.env.config.set('trac', 'permission_policies',
'TicketOnlyViewsTicket, LegacyAttachmentPolicy')
self.env.config.set('attachment', 'max_size', 512)
self.perm = PermissionCache(self.env)
def tearDown(self):
shutil.rmtree(self.env.path)
self.env.reset_db()
def test_get_path(self):
attachment = Attachment(self.env, 'ticket', 42)
attachment.filename = 'foo.txt'
self.assertEqual(os.path.join(self.attachments_dir, 'ticket',
hashes['42'][0:3], hashes['42'],
hashes['foo.txt'] + '.txt'),
attachment.path)
attachment = Attachment(self.env, 'wiki', 'SomePage')
attachment.filename = 'bar.jpg'
self.assertEqual(os.path.join(self.attachments_dir, 'wiki',
hashes['SomePage'][0:3],
hashes['SomePage'],
hashes['bar.jpg'] + '.jpg'),
attachment.path)
def test_path_extension(self):
attachment = Attachment(self.env, 'ticket', 42)
attachment.filename = 'Foo.Mp3'
self.assertEqual(os.path.join(self.attachments_dir, 'ticket',
hashes['42'][0:3], hashes['42'],
hashes['Foo.Mp3'] + '.Mp3'),
attachment.path)
attachment = Attachment(self.env, 'wiki', 'SomePage')
attachment.filename = 'bar.7z'
self.assertEqual(os.path.join(self.attachments_dir, 'wiki',
hashes['SomePage'][0:3],
hashes['SomePage'],
hashes['bar.7z'] + '.7z'),
attachment.path)
attachment = Attachment(self.env, 'ticket', 42)
attachment.filename = 'foo.$$$'
self.assertEqual(os.path.join(self.attachments_dir, 'ticket',
hashes['42'][0:3], hashes['42'],
hashes['foo.$$$']),
attachment.path)
attachment = Attachment(self.env, 'wiki', 'SomePage')
attachment.filename = u'bar.aäc'
self.assertEqual(os.path.join(self.attachments_dir, 'wiki',
hashes['SomePage'][0:3],
hashes['SomePage'],
hashes[u'bar.aäc']),
attachment.path)
def test_get_path_encoded(self):
attachment = Attachment(self.env, 'ticket', 42)
attachment.filename = 'Teh foo.txt'
self.assertEqual(os.path.join(self.attachments_dir, 'ticket',
hashes['42'][0:3], hashes['42'],
hashes['Teh foo.txt'] + '.txt'),
attachment.path)
attachment = Attachment(self.env, 'wiki', u'ÜberSicht')
attachment.filename = 'Teh bar.jpg'
self.assertEqual(os.path.join(self.attachments_dir, 'wiki',
hashes[u'ÜberSicht'][0:3],
hashes[u'ÜberSicht'],
hashes['Teh bar.jpg'] + '.jpg'),
attachment.path)
def test_select_empty(self):
self.assertRaises(StopIteration,
Attachment.select(self.env, 'ticket', 42).next)
self.assertRaises(StopIteration,
Attachment.select(self.env, 'wiki', 'SomePage').next)
def test_insert(self):
attachment = Attachment(self.env, 'ticket', 42)
attachment.insert('foo.txt', StringIO(''), 0, 1)
attachment = Attachment(self.env, 'ticket', 42)
attachment.insert('bar.jpg', StringIO(''), 0, 2)
attachments = Attachment.select(self.env, 'ticket', 42)
self.assertEqual('foo.txt', attachments.next().filename)
self.assertEqual('bar.jpg', attachments.next().filename)
self.assertRaises(StopIteration, attachments.next)
def test_insert_unique(self):
attachment = Attachment(self.env, 'ticket', 42)
attachment.insert('foo.txt', StringIO(''), 0)
self.assertEqual('foo.txt', attachment.filename)
attachment = Attachment(self.env, 'ticket', 42)
attachment.insert('foo.txt', StringIO(''), 0)
self.assertEqual('foo.2.txt', attachment.filename)
self.assertEqual(os.path.join(self.attachments_dir, 'ticket',
hashes['42'][0:3], hashes['42'],
hashes['foo.2.txt'] + '.txt'),
attachment.path)
self.assert_(os.path.exists(attachment.path))
def test_insert_outside_attachments_dir(self):
attachment = Attachment(self.env, '../../../../../sth/private', 42)
self.assertRaises(TracError, attachment.insert, 'foo.txt',
StringIO(''), 0)
def test_delete(self):
attachment1 = Attachment(self.env, 'wiki', 'SomePage')
attachment1.insert('foo.txt', StringIO(''), 0)
attachment2 = Attachment(self.env, 'wiki', 'SomePage')
attachment2.insert('bar.jpg', StringIO(''), 0)
attachments = Attachment.select(self.env, 'wiki', 'SomePage')
self.assertEqual(2, len(list(attachments)))
attachment1.delete()
attachment2.delete()
assert not os.path.exists(attachment1.path)
assert not os.path.exists(attachment2.path)
attachments = Attachment.select(self.env, 'wiki', 'SomePage')
self.assertEqual(0, len(list(attachments)))
def test_delete_file_gone(self):
"""
Verify that deleting an attachment works even if the referenced file
doesn't exist for some reason.
"""
attachment = Attachment(self.env, 'wiki', 'SomePage')
attachment.insert('foo.txt', StringIO(''), 0)
os.unlink(attachment.path)
attachment.delete()
def test_reparent(self):
attachment1 = Attachment(self.env, 'wiki', 'SomePage')
attachment1.insert('foo.txt', StringIO(''), 0)
path1 = attachment1.path
attachment2 = Attachment(self.env, 'wiki', 'SomePage')
attachment2.insert('bar.jpg', StringIO(''), 0)
attachments = Attachment.select(self.env, 'wiki', 'SomePage')
self.assertEqual(2, len(list(attachments)))
attachments = Attachment.select(self.env, 'ticket', 123)
self.assertEqual(0, len(list(attachments)))
assert os.path.exists(path1) and os.path.exists(attachment2.path)
attachment1.reparent('ticket', 123)
self.assertEqual('ticket', attachment1.parent_realm)
self.assertEqual('ticket', attachment1.resource.parent.realm)
self.assertEqual('123', attachment1.parent_id)
self.assertEqual('123', attachment1.resource.parent.id)
attachments = Attachment.select(self.env, 'wiki', 'SomePage')
self.assertEqual(1, len(list(attachments)))
attachments = Attachment.select(self.env, 'ticket', 123)
self.assertEqual(1, len(list(attachments)))
assert not os.path.exists(path1) and os.path.exists(attachment1.path)
assert os.path.exists(attachment2.path)
def test_legacy_permission_on_parent(self):
"""Ensure that legacy action tests are done on parent. As
`ATTACHMENT_VIEW` maps to `TICKET_VIEW`, the `TICKET_VIEW` is tested
against the ticket's resource."""
attachment = Attachment(self.env, 'ticket', 42)
self.assert_('ATTACHMENT_VIEW' in self.perm(attachment.resource))
def test_resource_doesnt_exist(self):
r = Resource('wiki', 'WikiStart').child('attachment', 'file.txt')
self.assertEqual(False, AttachmentModule(self.env).resource_exists(r))
def test_resource_exists(self):
    """resource_exists() is True once the attachment has been inserted."""
    attachment = Attachment(self.env, 'wiki', 'WikiStart')
    attachment.insert('file.txt', StringIO(''), 1)
    self.assertTrue(resource_exists(self.env, attachment.resource))
class AttachmentResourceChangeListenerTestCase(unittest.TestCase):
    """Exercise IResourceChangeListener notifications for attachments.

    The helper listener (`TestResourceChangeListener`) records the last
    action/resource it saw; `listener_callback` additionally snapshots the
    parent realm/id and filename at notification time, and the tests
    assert against those snapshots.
    """
    DUMMY_PARENT_REALM = "wiki"
    DUMMY_PARENT_ID = "WikiStart"

    def setUp(self):
        self.env = EnvironmentStub(default_data=True)
        self.listener = TestResourceChangeListener(self.env)
        self.listener.resource_type = Attachment
        self.listener.callback = self.listener_callback

    def tearDown(self):
        self.env.reset_db()

    def test_change_listener_created(self):
        # Inserting an attachment must fire a "created" notification.
        attachment = self._create_attachment()
        self.assertEqual('created', self.listener.action)
        self.assertTrue(isinstance(self.listener.resource, Attachment))
        self.assertEqual(attachment.filename, self.filename)
        self.assertEqual(attachment.parent_realm, self.parent_realm)
        self.assertEqual(attachment.parent_id, self.parent_id)

    def test_change_listener_reparent(self):
        # Reparenting fires "changed" with the old parent id in old_values;
        # the realm did not change, so it must not appear there.
        attachment = self._create_attachment()
        attachment.reparent(self.DUMMY_PARENT_REALM, "SomePage")
        self.assertEqual('changed', self.listener.action)
        self.assertTrue(isinstance(self.listener.resource, Attachment))
        self.assertEqual(attachment.filename, self.filename)
        self.assertEqual(attachment.parent_realm, self.parent_realm)
        self.assertEqual("SomePage", self.parent_id)
        self.assertNotIn("parent_realm", self.listener.old_values)
        self.assertEqual(
            self.DUMMY_PARENT_ID, self.listener.old_values["parent_id"])

    def test_change_listener_deleted(self):
        attachment = self._create_attachment()
        attachment.delete()
        self.assertEqual('deleted', self.listener.action)
        self.assertTrue(isinstance(self.listener.resource, Attachment))
        self.assertEqual(attachment.filename, self.filename)

    def _create_attachment(self):
        # Minimal attachment under the dummy wiki parent page.
        attachment = Attachment(
            self.env, self.DUMMY_PARENT_REALM, self.DUMMY_PARENT_ID)
        attachment.insert('file.txt', StringIO(''), 1)
        return attachment

    def listener_callback(self, action, resource, context, old_values = None):
        # Snapshot the resource coordinates at notification time; the tests
        # compare against these rather than the live resource afterwards.
        self.parent_realm = resource.parent_realm
        self.parent_id = resource.parent_id
        self.filename = resource.filename
def suite():
    """Build the test suite for this module."""
    all_tests = unittest.TestSuite()
    for case in (AttachmentTestCase,
                 AttachmentResourceChangeListenerTestCase):
        all_tests.addTest(unittest.makeSuite(case, 'test'))
    return all_tests
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main(defaultTest='suite')
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/tests/core.py | trac/trac/tests/core.py | # -*- coding: utf-8 -*-
#
# Copyright (C)2005-2009 Edgewall Software
# Copyright (C) 2005 Christopher Lenz <cmlenz@gmx.de>
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://trac.edgewall.org/wiki/TracLicense.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://trac.edgewall.org/log/.
#
# Author: Christopher Lenz <cmlenz@gmx.de>
from trac.core import *
import unittest
class ITest(Interface):
    """Minimal extension-point interface used by the tests below."""
    def test():
        """Dummy function."""
class IOtherTest(Interface):
    """Second extension-point interface, for multi-interface tests."""
    def other_test():
        """Other dummy function."""
class ComponentTestCase(unittest.TestCase):
    """Behavior of the trac component / extension-point machinery.

    Each test runs against a private `ComponentManager` and an emptied
    component registry (restored in `tearDown`), so components declared
    inside the tests do not leak between tests or into other modules.
    """

    def setUp(self):
        from trac.core import ComponentManager, ComponentMeta
        self.compmgr = ComponentManager()
        # Make sure we have no external components hanging around in the
        # component registry
        self.old_registry = ComponentMeta._registry
        ComponentMeta._registry = {}

    def tearDown(self):
        # Restore the original component registry
        from trac.core import ComponentMeta
        ComponentMeta._registry = self.old_registry

    def test_base_class_not_registered(self):
        """
        Make sure that the Component base class does not appear in the component
        registry.
        """
        from trac.core import ComponentMeta
        assert Component not in ComponentMeta._components
        self.assertRaises(TracError, self.compmgr.__getitem__, Component)

    def test_abstract_component_not_registered(self):
        """
        Make sure that a Component class marked as abstract does not appear in
        the component registry.
        """
        from trac.core import ComponentMeta
        class AbstractComponent(Component):
            abstract = True
        assert AbstractComponent not in ComponentMeta._components
        self.assertRaises(TracError, self.compmgr.__getitem__,
                          AbstractComponent)

    def test_unregistered_component(self):
        """
        Make sure the component manager refuses to manage classes not derived
        from `Component`.
        """
        class NoComponent(object):
            pass
        self.assertRaises(TracError, self.compmgr.__getitem__, NoComponent)

    def test_component_registration(self):
        """
        Verify that classes derived from `Component` are managed by the
        component manager.
        """
        class ComponentA(Component):
            pass
        assert self.compmgr[ComponentA]
        assert ComponentA(self.compmgr)

    def test_component_identity(self):
        """
        Make sure instantiating a component multiple times just returns the
        same instance again.
        """
        class ComponentA(Component):
            pass
        c1 = ComponentA(self.compmgr)
        c2 = ComponentA(self.compmgr)
        assert c1 is c2, 'Expected same component instance'
        c2 = self.compmgr[ComponentA]
        assert c1 is c2, 'Expected same component instance'

    def test_component_initializer(self):
        """
        Makes sure that a component's `__init__` method gets called.
        """
        class ComponentA(Component):
            def __init__(self):
                self.data = 'test'
        self.assertEqual('test', ComponentA(self.compmgr).data)
        ComponentA(self.compmgr).data = 'newtest'
        self.assertEqual('newtest', ComponentA(self.compmgr).data)

    def test_inherited_component_initializer(self):
        """
        Makes sure that the `__init__` method of a component's super-class
        gets called if the component doesn't override it.
        """
        class ComponentA(Component):
            def __init__(self):
                self.data = 'foo'
        class ComponentB(ComponentA):
            def __init__(self):
                self.data = 'bar'
        class ComponentC(ComponentB):
            pass
        self.assertEqual('bar', ComponentC(self.compmgr).data)
        ComponentC(self.compmgr).data = 'baz'
        self.assertEqual('baz', ComponentC(self.compmgr).data)

    def test_implements_called_outside_classdef(self):
        """
        Verify that calling implements() outside a class definition raises an
        `AssertionError`.
        """
        try:
            implements()
        except AssertionError:
            pass
        else:
            self.fail('Expected AssertionError')

    def test_implements_multiple(self):
        """
        Verify that a component "implementing" an interface more than once
        (e.g. through inheritance) is not called more than once from an
        extension point.
        """
        log = []
        class Parent(Component):
            abstract = True
            implements(ITest)
        class Child(Parent):
            implements(ITest)
            def test(self):
                log.append("call")
        class Other(Component):
            tests = ExtensionPoint(ITest)
        for test in Other(self.compmgr).tests:
            test.test()
        self.assertEqual(["call"], log)

    def test_attribute_access(self):
        """
        Verify that accessing undefined attributes on components raises an
        `AttributeError`.
        """
        class ComponentA(Component):
            pass
        comp = ComponentA(self.compmgr)
        try:
            comp.foo
            self.fail('Expected AttributeError')
        except AttributeError:
            pass

    def test_nonconforming_extender(self):
        """
        Verify that accessing a method of a declared extension point interface
        raises a normal `AttributeError` if the component does not implement
        the method.
        """
        class ComponentA(Component):
            tests = ExtensionPoint(ITest)
        class ComponentB(Component):
            implements(ITest)
        tests = iter(ComponentA(self.compmgr).tests)
        try:
            tests.next().test()
            self.fail('Expected AttributeError')
        except AttributeError:
            pass

    def test_extension_point_with_no_extension(self):
        """
        Verify that accessing an extension point with no extenders returns an
        empty list.
        """
        class ComponentA(Component):
            tests = ExtensionPoint(ITest)
        tests = iter(ComponentA(self.compmgr).tests)
        self.assertRaises(StopIteration, tests.next)

    def test_extension_point_with_one_extension(self):
        """
        Verify that a single component extending an extension point can be
        accessed through the extension point attribute of the declaring
        component.
        """
        class ComponentA(Component):
            tests = ExtensionPoint(ITest)
        class ComponentB(Component):
            implements(ITest)
            def test(self):
                return 'x'
        tests = iter(ComponentA(self.compmgr).tests)
        self.assertEquals('x', tests.next().test())
        self.assertRaises(StopIteration, tests.next)

    def test_extension_point_with_two_extensions(self):
        """
        Verify that two components extending an extension point can be accessed
        through the extension point attribute of the declaring component.
        """
        class ComponentA(Component):
            tests = ExtensionPoint(ITest)
        class ComponentB(Component):
            implements(ITest)
            def test(self):
                return 'x'
        class ComponentC(Component):
            implements(ITest)
            def test(self):
                return 'y'
        results = [test.test() for test in ComponentA(self.compmgr).tests]
        self.assertEquals(['x', 'y'], sorted(results))

    def test_inherited_extension_point(self):
        """
        Verify that extension points are inherited to sub-classes.
        """
        class BaseComponent(Component):
            tests = ExtensionPoint(ITest)
        class ConcreteComponent(BaseComponent):
            pass
        class ExtendingComponent(Component):
            implements(ITest)
            def test(self):
                return 'x'
        tests = iter(ConcreteComponent(self.compmgr).tests)
        self.assertEquals('x', tests.next().test())
        self.assertRaises(StopIteration, tests.next)

    def test_inherited_implements(self):
        """
        Verify that a component with a super-class implementing an extension
        point interface is also registered as implementing that interface.
        """
        class BaseComponent(Component):
            implements(ITest)
            abstract = True
        class ConcreteComponent(BaseComponent):
            pass
        from trac.core import ComponentMeta
        assert ConcreteComponent in ComponentMeta._registry.get(ITest, [])

    def test_inherited_implements_multilevel(self):
        """
        Verify that extension point interfaces are inherited for more than
        one level of inheritance.
        """
        class BaseComponent(Component):
            implements(ITest)
            abstract = True
        class ChildComponent(BaseComponent):
            implements(IOtherTest)
            abstract = True
        class ConcreteComponent(ChildComponent):
            pass
        from trac.core import ComponentMeta
        assert ConcreteComponent in ComponentMeta._registry.get(ITest, [])
        assert ConcreteComponent in ComponentMeta._registry.get(IOtherTest, [])

    def test_component_manager_component(self):
        """
        Verify that a component manager can itself be a component with its own
        extension points.
        """
        from trac.core import ComponentManager
        class ManagerComponent(ComponentManager, Component):
            tests = ExtensionPoint(ITest)
            def __init__(self, foo, bar):
                ComponentManager.__init__(self)
                self.foo, self.bar = foo, bar
        class Extender(Component):
            implements(ITest)
            def test(self):
                return 'x'
        mgr = ManagerComponent('Test', 42)
        # The manager is its own singleton within its component space.
        assert id(mgr) == id(mgr[ManagerComponent])
        tests = iter(mgr.tests)
        self.assertEquals('x', tests.next().test())
        self.assertRaises(StopIteration, tests.next)

    def test_component_manager_component_isolation(self):
        """
        Verify that a component manager that is also a component will only
        be listed in extension points for components instantiated in
        its scope.
        See bh:comment:5:ticket:438 and #11121
        """
        from trac.core import ComponentManager
        class ManagerComponent(ComponentManager, Component):
            tests = ExtensionPoint(ITest)
            def __init__(self, foo, bar):
                ComponentManager.__init__(self)
                self.foo, self.bar = foo, bar
        class YetAnotherManagerComponent(ComponentManager, Component):
            implements(ITest)
            def __init__(self, foo, bar):
                ComponentManager.__init__(self)
                self.foo, self.bar = foo, bar
            # ITest methods
            def test(self):
                return self.foo + self.bar
        class ComponentA(Component):
            tests = ExtensionPoint(ITest)
        class Extender(Component):
            implements(ITest)
            def test(self):
                return 'x'
        mgr = ManagerComponent('Test', 42)
        yamc = YetAnotherManagerComponent('y', 'z')
        # A manager component must never be resolvable through a foreign
        # manager's component space.
        assert yamc[ManagerComponent] is None
        assert mgr[YetAnotherManagerComponent] is None
        assert yamc[ComponentManager] is None
        assert self.compmgr[YetAnotherManagerComponent] is None
        assert mgr[ComponentManager] is None
        assert self.compmgr[ManagerComponent] is None
        self.assertTrue(any(c.__class__ is YetAnotherManagerComponent
                            for c in ComponentA(yamc).tests))
        self.assertFalse(any(c.__class__ is YetAnotherManagerComponent
                             for c in ComponentA(self.compmgr).tests))
        self.assertFalse(any(c.__class__ is YetAnotherManagerComponent
                             for c in ComponentA(mgr).tests))
        self.assertFalse(any(c.__class__ is ManagerComponent
                             for c in ComponentA(yamc).tests))
        self.assertFalse(any(c.__class__ is YetAnotherManagerComponent
                             for c in mgr.tests))
        results = [test.test() for test in ComponentA(yamc).tests]
        self.assertEquals(['x', 'yz'], sorted(results))
        results = [test.test() for test in ComponentA(self.compmgr).tests]
        self.assertEquals(['x'], sorted(results))
        results = [test.test() for test in ComponentA(mgr).tests]
        self.assertEquals(['x'], sorted(results))
        results = [test.test() for test in mgr.tests]
        self.assertEquals(['x'], sorted(results))

    def test_instantiation_doesnt_enable(self):
        """
        Make sure that a component disabled by the ComponentManager is not
        implicitly enabled by instantiating it directly.
        """
        from trac.core import ComponentManager
        class DisablingComponentManager(ComponentManager):
            def is_component_enabled(self, cls):
                return False
        class ComponentA(Component):
            pass
        mgr = DisablingComponentManager()
        instance = ComponentA(mgr)
        self.assertEqual(None, mgr[ComponentA])
def suite():
    """Return the test suite for this module (ComponentTestCase only)."""
    tests = unittest.makeSuite(ComponentTestCase, 'test')
    return tests
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/tests/resource.py | trac/trac/tests/resource.py | # -*- coding: utf-8 -*-
#
# Copyright (C) 2007-2009 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://trac.edgewall.org/wiki/TracLicense.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://trac.edgewall.org/log/.
import doctest
import unittest
from trac import resource
from trac.resource import IResourceChangeListener
from trac.core import implements, Component
class ResourceTestCase(unittest.TestCase):
    """Equality semantics of `resource.Resource`, including parent chains."""

    def test_equals(self):
        # Identical coordinates compare equal.
        self.assertEqual(resource.Resource(), resource.Resource())
        self.assertEqual(resource.Resource(None), resource.Resource())
        self.assertEqual(resource.Resource('wiki'), resource.Resource('wiki'))
        self.assertEqual(resource.Resource('wiki', 'WikiStart'),
                         resource.Resource('wiki', 'WikiStart'))
        self.assertEqual(resource.Resource('wiki', 'WikiStart', 42),
                         resource.Resource('wiki', 'WikiStart', 42))
        # Differing versions are unequal; version 0 is distinct from None.
        self.assertNotEqual(resource.Resource('wiki', 'WikiStart', 42),
                            resource.Resource('wiki', 'WikiStart', 43))
        self.assertNotEqual(resource.Resource('wiki', 'WikiStart', 0),
                            resource.Resource('wiki', 'WikiStart', None))
        # Equality extends to the parent chain.
        child_a = resource.Resource('attachment', 'file.txt')
        child_a.parent = resource.Resource('wiki', 'WikiStart')
        child_b = resource.Resource('attachment', 'file.txt')
        child_b.parent = resource.Resource('wiki', 'WikiStart')
        self.assertEqual(child_a, child_b)
        child_b.parent = child_b.parent(version=42)
        self.assertNotEqual(child_a, child_b)
class NeighborhoodTestCase(unittest.TestCase):
    """Equality semantics of `resource.Neighborhood` and of resources
    bound to a neighborhood, including cloning of parent hierarchies.
    """

    def test_equals(self):
        # Plain equalities
        self.assertEqual(resource.Neighborhood(), resource.Neighborhood())
        self.assertEqual(resource.Neighborhood(None), resource.Neighborhood())
        self.assertEqual(resource.Neighborhood('realm'),
                         resource.Neighborhood('realm'))
        self.assertEqual(resource.Neighborhood('realm', 'id'),
                         resource.Neighborhood('realm', 'id'))
        # Inequalities
        self.assertNotEqual(resource.Neighborhood('realm', 'id'),
                            resource.Neighborhood('realm', 'id1'))
        self.assertNotEqual(resource.Neighborhood('realm1', 'id'),
                            resource.Neighborhood('realm', 'id'))

    def test_resources_equals(self):
        # Resources are equal only when both their coordinates and their
        # neighborhood match.
        nbh = resource.Neighborhood('realm', 'id')
        nbh1 = resource.Neighborhood('realm', 'id1')
        # Plain equalities
        self.assertEqual(nbh(resource.Resource()), nbh(resource.Resource()))
        self.assertEqual(nbh(resource.Resource(None)), nbh(resource.Resource()))
        self.assertEqual(nbh(resource.Resource('wiki')),
                         nbh(resource.Resource('wiki')))
        self.assertEqual(nbh(resource.Resource('wiki', 'WikiStart')),
                         nbh(resource.Resource('wiki', 'WikiStart')))
        self.assertEqual(nbh(resource.Resource('wiki', 'WikiStart', 42)),
                         nbh(resource.Resource('wiki', 'WikiStart', 42)))
        # Inequalities
        self.assertNotEqual(nbh(resource.Resource('wiki', 'WikiStart', 42)),
                            nbh(resource.Resource('wiki', 'WikiStart', 43)))
        self.assertNotEqual(nbh(resource.Resource('wiki', 'WikiStart', 0)),
                            nbh(resource.Resource('wiki', 'WikiStart', None)))
        self.assertNotEqual(nbh1(resource.Resource()),
                            nbh(resource.Resource()))
        self.assertNotEqual(nbh1(resource.Resource(None)),
                            nbh(resource.Resource()))
        self.assertNotEqual(nbh1(resource.Resource('wiki')),
                            nbh(resource.Resource('wiki')))
        self.assertNotEqual(nbh1(resource.Resource('wiki', 'WikiStart')),
                            nbh(resource.Resource('wiki', 'WikiStart')))
        self.assertNotEqual(nbh1(resource.Resource('wiki', 'WikiStart', 42)),
                            nbh(resource.Resource('wiki', 'WikiStart', 42)))
        # Resource hierarchy
        r1 = nbh(resource.Resource('attachment', 'file.txt'))
        r1.parent = nbh(resource.Resource('wiki', 'WikiStart'))
        r2 = nbh(resource.Resource('attachment', 'file.txt'))
        r2.parent = nbh(resource.Resource('wiki', 'WikiStart'))
        self.assertEqual(r1, r2)
        r2.parent = r2.parent(version=42)
        self.assertNotEqual(r1, r2)

    def test_hierarchy_clone(self):
        # Applying a neighborhood to a resource clones the whole parent
        # chain into that neighborhood; the source chain keeps its own
        # (possibly absent) neighborhood.
        def enum_parents(r):
            while r is not None:
                yield r
                r = r.parent
        nbh = resource.Neighborhood('realm', 'id')
        nbh1 = resource.Neighborhood('realm', 'id1')
        src = resource.Resource('attachment', 'file.txt')
        src.parent = resource.Resource('wiki', 'WikiStart')
        src.parent.parent = resource.Resource('x', 'y')
        self.assertTrue(all(r.neighborhood is nbh
                            for r in enum_parents(nbh(src))))
        self.assertTrue(all(r.neighborhood is None
                            for r in enum_parents(src)))
        src = nbh1(src)
        self.assertTrue(all(r.neighborhood is nbh
                            for r in enum_parents(nbh(src))))
        self.assertTrue(all(r.neighborhood is nbh1
                            for r in enum_parents(src)))
class TestResourceChangeListener(Component):
    """Test helper implementing `IResourceChangeListener`.

    Records the last notification (action, resource, context, old_values)
    as instance attributes and forwards it to a pluggable `callback`, so
    test cases can snapshot additional state at notification time.

    NOTE(review): instantiated through trac's Component machinery —
    presumably the ComponentManager argument is consumed before
    `__init__`, hence the argument-less signature; confirm against
    trac.core.
    """
    implements(IResourceChangeListener)

    def __init__(self):
        # No resource type configured yet: match_resource() matches nothing
        # until a test assigns `resource_type`.
        self.resource_type = None

    def callback(self, action, resource, context, old_values = None):
        # Default callback is a no-op; tests replace this attribute.
        pass

    def match_resource(self, resource):
        if self.resource_type is None:
            return False
        return isinstance(resource, self.resource_type)

    def resource_created(self, resource, context):
        self.action = "created"
        self.resource = resource
        self.context = context
        self.callback(self.action, resource, context)

    def resource_changed(self, resource, old_values, context):
        self.action = "changed"
        self.resource = resource
        self.old_values = old_values
        self.context = context
        self.callback(
            self.action, resource, context, old_values=self.old_values)

    def resource_deleted(self, resource, context):
        self.action = "deleted"
        self.resource = resource
        self.context = context
        self.callback(self.action, resource, context)

    def resource_version_deleted(self, resource, context):
        self.action = "version_deleted"
        self.resource = resource
        self.context = context
        self.callback(self.action, resource, context)
def suite():
    """Combine the doctests of trac.resource with the unit tests above."""
    tests = unittest.TestSuite()
    tests.addTest(doctest.DocTestSuite(resource))
    tests.addTest(unittest.makeSuite(ResourceTestCase, 'test'))
    tests.addTest(unittest.makeSuite(NeighborhoodTestCase, 'test'))
    return tests
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main(defaultTest='suite')
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/tests/allwiki.py | trac/trac/tests/allwiki.py | import unittest
import trac.tests.wikisyntax
import trac.ticket.tests.wikisyntax
import trac.versioncontrol.web_ui.tests.wikisyntax
import trac.web.tests.wikisyntax
import trac.wiki.tests.wikisyntax
import trac.wiki.tests.formatter
def suite():
    """Aggregate all wiki-syntax related test suites into one.

    Bug fix: `trac.wiki.tests.macros` is referenced below but is never
    imported at module level (only `wikisyntax` and `formatter` are), so
    the attribute lookup would raise AttributeError at runtime; import it
    here explicitly.
    """
    import trac.wiki.tests.macros
    suite = unittest.TestSuite()
    suite.addTest(trac.tests.wikisyntax.suite())
    suite.addTest(trac.ticket.tests.wikisyntax.suite())
    suite.addTest(trac.versioncontrol.web_ui.tests.wikisyntax.suite())
    suite.addTest(trac.web.tests.wikisyntax.suite())
    suite.addTest(trac.wiki.tests.macros.suite())
    suite.addTest(trac.wiki.tests.wikisyntax.suite())
    suite.addTest(trac.wiki.tests.formatter.suite())
    return suite
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main(defaultTest='suite')
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/tests/config.py | trac/trac/tests/config.py | # -*- coding: utf-8 -*-
#
# Copyright (C) 2005-2009 Edgewall Software
# Copyright (C) 2005-2007 Christopher Lenz <cmlenz@gmx.de>
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://trac.edgewall.org/wiki/TracLicense.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://trac.edgewall.org/log/.
from __future__ import with_statement
import os
import tempfile
import time
import unittest
from trac.config import *
from trac.test import Configuration
from trac.util import create_file
class ConfigurationTestCase(unittest.TestCase):
def setUp(self):
    """Create an empty trac-test.ini and isolate the global Option registry."""
    tmpdir = os.path.realpath(tempfile.gettempdir())
    self.filename = os.path.join(tmpdir, 'trac-test.ini')
    self._write([])
    # Options declared inside the tests must not leak into other modules.
    self._orig_registry = Option.registry
    Option.registry = {}
def tearDown(self):
    # Undo setUp: restore the global Option registry, delete the test file.
    Option.registry = self._orig_registry
    os.remove(self.filename)
def _read(self):
    """Return a fresh Configuration parsed from the test .ini file."""
    return Configuration(self.filename)
def _write(self, lines):
    """Replace the test .ini file's content with *lines*, utf-8 encoded.

    An empty list produces an empty file; otherwise a trailing newline
    is appended after the last line.
    """
    text = '\n'.join(lines + [''])
    with open(self.filename, 'w') as fileobj:
        fileobj.write(text.encode('utf-8'))
def test_default(self):
    """Declaring an Option registers its default value globally."""
    config = self._read()
    self.assertEquals('', config.get('a', 'option'))
    self.assertEquals('value', config.get('a', 'option', 'value'))
    # Defining the Option (at class-definition time) installs the default.
    class Foo(object):
        option_a = Option('a', 'option', 'value')
    self.assertEquals('value', config.get('a', 'option'))
def test_default_bool(self):
    """getbool() coerces explicit and Option-declared defaults."""
    config = self._read()
    self.assertEquals(False, config.getbool('a', 'option'))
    self.assertEquals(True, config.getbool('a', 'option', 'yes'))
    self.assertEquals(True, config.getbool('a', 'option', 1))
    class Foo(object):
        option_a = Option('a', 'option', 'true')
    self.assertEquals(True, config.getbool('a', 'option'))
def test_default_int(self):
    """getint() coerces defaults and rejects non-numeric ones."""
    config = self._read()
    self.assertRaises(ConfigurationError,
                      config.getint, 'a', 'option', 'b')
    self.assertEquals(0, config.getint('a', 'option'))
    self.assertEquals(1, config.getint('a', 'option', '1'))
    self.assertEquals(1, config.getint('a', 'option', 1))
    class Foo(object):
        option_a = Option('a', 'option', '2')
    self.assertEquals(2, config.getint('a', 'option'))
def test_default_float(self):
    """getfloat() coerces int and string defaults, rejects non-numeric."""
    config = self._read()
    self.assertRaises(ConfigurationError,
                      config.getfloat, 'a', 'option', 'b')
    self.assertEquals(0.0, config.getfloat('a', 'option'))
    self.assertEquals(1.2, config.getfloat('a', 'option', '1.2'))
    self.assertEquals(1.2, config.getfloat('a', 'option', 1.2))
    self.assertEquals(1.0, config.getfloat('a', 'option', 1))
    class Foo(object):
        option_a = Option('a', 'option', '2.5')
    self.assertEquals(2.5, config.getfloat('a', 'option'))
def test_default_path(self):
    """getpath() resolves relative defaults to absolute paths."""
    config = self._read()
    class Foo(object):
        option_a = PathOption('a', 'opt1', 'file.ini')
        option_b = PathOption('a', 'opt2', '/somewhere/file.ini')
    # Raw get() keeps the relative value; getpath() makes it absolute.
    self.assertEquals('file.ini', config.get('a', 'opt1'))
    self.assertNotEquals('file.ini', config.getpath('a', 'opt1'))
    self.assertTrue(os.path.isabs(config.getpath('a', 'opt1')))
    # splitdrive + '\\' replacement normalize for Windows comparisons.
    self.assertEquals('/somewhere/file.ini', os.path.splitdrive(
        config.getpath('a', 'opt2'))[1].replace('\\', '/'))
    self.assertEquals('/none.ini', os.path.splitdrive(
        config.getpath('a', 'opt3', '/none.ini'))[1].replace('\\', '/'))
    self.assertNotEquals('none.ini', config.getpath('a', 'opt3', 'none.ini'))
def test_read_and_get(self):
    """A value parsed from the file wins over the supplied default."""
    self._write(['[a]', 'option = x'])
    config = self._read()
    self.assertEquals('x', config.get('a', 'option'))
    self.assertEquals('x', config.get('a', 'option', 'y'))
    self.assertEquals('y', config.get('b', 'option2', 'y'))
def test_read_and_get_unicode(self):
    """Non-ASCII section and option names round-trip through the file."""
    self._write([u'[ä]', u'öption = x'])
    config = self._read()
    self.assertEquals('x', config.get(u'ä', u'öption'))
    self.assertEquals('x', config.get(u'ä', u'öption', 'y'))
    self.assertEquals('y', config.get('b', u'öption2', 'y'))
def test_read_and_getbool(self):
    """getbool() truth table: yes/true/enabled/on/nonzero are True,
    disabled/0/0.0 and missing options are False.
    """
    self._write(['[a]', 'option = yes', 'option2 = true',
                 'option3 = eNaBlEd', 'option4 = on',
                 'option5 = 1', 'option6 = 123', 'option7 = 123.456',
                 'option8 = disabled', 'option9 = 0', 'option10 = 0.0'])
    config = self._read()
    self.assertEquals(True, config.getbool('a', 'option'))
    self.assertEquals(True, config.getbool('a', 'option', False))
    self.assertEquals(True, config.getbool('a', 'option2'))
    self.assertEquals(True, config.getbool('a', 'option3'))
    self.assertEquals(True, config.getbool('a', 'option4'))
    self.assertEquals(True, config.getbool('a', 'option5'))
    self.assertEquals(True, config.getbool('a', 'option6'))
    self.assertEquals(True, config.getbool('a', 'option7'))
    self.assertEquals(False, config.getbool('a', 'option8'))
    self.assertEquals(False, config.getbool('a', 'option9'))
    self.assertEquals(False, config.getbool('a', 'option10'))
    self.assertEquals(False, config.getbool('b', 'option_b'))
    self.assertEquals(False, config.getbool('b', 'option_b', False))
    self.assertEquals(False, config.getbool('b', 'option_b', 'disabled'))
def test_read_and_getint(self):
    """getint() parses file values and coerces int/str defaults."""
    self._write(['[a]', 'option = 42'])
    config = self._read()
    self.assertEquals(42, config.getint('a', 'option'))
    self.assertEquals(42, config.getint('a', 'option', 25))
    self.assertEquals(0, config.getint('b', 'option2'))
    self.assertEquals(25, config.getint('b', 'option2', 25))
    self.assertEquals(25, config.getint('b', 'option2', '25'))
def test_read_and_getfloat(self):
    """getfloat() parses file values and coerces numeric/str defaults."""
    self._write(['[a]', 'option = 42.5'])
    config = self._read()
    self.assertEquals(42.5, config.getfloat('a', 'option'))
    self.assertEquals(42.5, config.getfloat('a', 'option', 25.3))
    self.assertEquals(0, config.getfloat('b', 'option2'))
    self.assertEquals(25.3, config.getfloat('b', 'option2', 25.3))
    self.assertEquals(25.0, config.getfloat('b', 'option2', 25))
    self.assertEquals(25.3, config.getfloat('b', 'option2', '25.3'))
def test_read_and_getlist(self):
    """getlist() splits on commas; list and string defaults both work."""
    self._write(['[a]', 'option = foo, bar, baz'])
    config = self._read()
    self.assertEquals(['foo', 'bar', 'baz'],
                      config.getlist('a', 'option'))
    self.assertEquals([],
                      config.getlist('b', 'option2'))
    self.assertEquals(['foo', 'bar', 'baz'],
                      config.getlist('b', 'option2', ['foo', 'bar', 'baz']))
    self.assertEquals(['foo', 'bar', 'baz'],
                      config.getlist('b', 'option2', 'foo, bar, baz'))
def test_read_and_getlist_sep(self):
    """getlist() honors a custom separator."""
    self._write(['[a]', 'option = foo | bar | baz'])
    config = self._read()
    self.assertEquals(['foo', 'bar', 'baz'],
                      config.getlist('a', 'option', sep='|'))
def test_read_and_getlist_keep_empty(self):
    """Empty items are dropped by default, kept with keep_empty=True."""
    self._write(['[a]', 'option = ,bar,baz'])
    config = self._read()
    self.assertEquals(['bar', 'baz'], config.getlist('a', 'option'))
    self.assertEquals(['', 'bar', 'baz'],
                      config.getlist('a', 'option', keep_empty=True))
def test_read_and_getlist_false_values(self):
    """Only None and empty strings count as 'empty'; other falsy values
    (False, numeric zeros) are kept even without keep_empty.
    """
    config = self._read()
    values = [None, False, '', 'foo', u'', u'bar',
              0, 0L, 0.0, 0j, 42, 43.0]
    self.assertEquals([False, 'foo', u'bar', 0, 0L, 0.0, 0j, 42, 43.0],
                      config.getlist('a', 'false', values))
    self.assertEquals(values, config.getlist('a', 'false', values,
                                             keep_empty=True))
def test_read_and_choice(self):
    """ChoiceOption: file value must be among the choices (as strings);
    the first choice is the default; out-of-set values raise.
    """
    self._write(['[a]', 'option = 2', 'invalid = d'])
    config = self._read()
    class Foo(object):
        # enclose in parentheses to avoid messages extraction
        option = (ChoiceOption)('a', 'option', ['Item1', 2, '3'])
        other = (ChoiceOption)('a', 'other', [1, 2, 3])
        invalid = (ChoiceOption)('a', 'invalid', ['a', 'b', 'c'])
        def __init__(self):
            self.config = config
    foo = Foo()
    self.assertEquals('2', foo.option)
    self.assertEquals('1', foo.other)
    self.assertRaises(ConfigurationError, getattr, foo, 'invalid')
def test_getpath(self):
    """getpath() resolves relative values against the ini file's directory."""
    base = os.path.dirname(self.filename)
    config = self._read()
    config.set('a', 'path_a', os.path.join(base, 'here', 'absolute.txt'))
    config.set('a', 'path_b', 'thisdir.txt')
    config.set('a', 'path_c', os.path.join(os.pardir, 'parentdir.txt'))
    self.assertEquals(os.path.join(base, 'here', 'absolute.txt'),
                      config.getpath('a', 'path_a'))
    self.assertEquals(os.path.join(base, 'thisdir.txt'),
                      config.getpath('a', 'path_b'))
    self.assertEquals(os.path.join(os.path.dirname(base), 'parentdir.txt'),
                      config.getpath('a', 'path_c'))
def test_set_and_save(self):
    """Options set through the API survive a save/reload round-trip,
    written utf-8 encoded with sections and option names sorted.
    """
    config = self._read()
    config.set('b', u'öption0', 'y')
    config.set(u'aä', 'öption0', 'x')
    config.set('aä', 'option2', "Voilà l'été")  # UTF-8
    config.set(u'aä', 'option1', u"Voilà l'été")  # unicode
    # Note: the following would depend on the locale.getpreferredencoding()
    # config.set('a', 'option3', "Voil\xe0 l'\xe9t\xe9") # latin-1
    self.assertEquals('x', config.get(u'aä', u'öption0'))
    self.assertEquals(u"Voilà l'été", config.get(u'aä', 'option1'))
    self.assertEquals(u"Voilà l'été", config.get(u'aä', 'option2'))
    config.save()
    # Use a context manager so the file is closed even when an
    # assertion below fails (the original leaked the handle then).
    with open(self.filename, 'r') as configfile:
        self.assertEquals(['# -*- coding: utf-8 -*-\n',
                           '\n',
                           '[aä]\n',
                           "option1 = Voilà l'été\n",
                           "option2 = Voilà l'été\n",
                           'öption0 = x\n',
                           # "option3 = Voilà l'été\n",
                           '\n',
                           '[b]\n',
                           'öption0 = y\n',
                           '\n'],
                          configfile.readlines())
    config2 = Configuration(self.filename)
    self.assertEquals('x', config2.get(u'aä', u'öption0'))
    self.assertEquals(u"Voilà l'été", config2.get(u'aä', 'option1'))
    self.assertEquals(u"Voilà l'été", config2.get(u'aä', 'option2'))
    # self.assertEquals(u"Voilà l'été", config2.get('a', 'option3'))
def test_set_and_save_inherit(self):
    """Saving writes only locally-set options; values inherited via the
    [inherit] file (trac-site.ini) are readable but not copied.
    """
    def testcb():
        config = self._read()
        config.set('a', 'option2', "Voilà l'été")  # UTF-8
        config.set('a', 'option1', u"Voilà l'été")  # unicode
        # 'option' comes from the inherited trac-site.ini.
        self.assertEquals('x', config.get('a', 'option'))
        self.assertEquals(u"Voilà l'été", config.get('a', 'option1'))
        self.assertEquals(u"Voilà l'été", config.get('a', 'option2'))
        config.save()
        # Context manager: close the file even if an assertion fails
        # (the original leaked the handle on failure).
        with open(self.filename, 'r') as configfile:
            self.assertEquals(['# -*- coding: utf-8 -*-\n',
                               '\n',
                               '[a]\n',
                               "option1 = Voilà l'été\n",
                               "option2 = Voilà l'été\n",
                               '\n',
                               '[inherit]\n',
                               "file = trac-site.ini\n",
                               '\n'],
                              configfile.readlines())
        config2 = Configuration(self.filename)
        self.assertEquals('x', config2.get('a', 'option'))
        self.assertEquals(u"Voilà l'été", config2.get('a', 'option1'))
        self.assertEquals(u"Voilà l'été", config2.get('a', 'option2'))
    self._test_with_inherit(testcb)
def test_simple_remove(self):
    """remove() clears values (even cached ones) and ignores unknowns."""
    self._write(['[a]', 'option = x'])
    config = self._read()
    config.get('a', 'option')  # populates the cache
    config.set(u'aä', u'öption', u'öne')
    config.remove('a', 'option')
    self.assertEquals('', config.get('a', 'option'))
    config.remove(u'aä', u'öption')
    self.assertEquals('', config.get('aä', 'öption'))
    config.remove('a', 'option2')  # shouldn't fail
    config.remove('b', 'option2')  # shouldn't fail
def test_sections(self):
    """sections() merges file sections with Option/ConfigSection
    declarations; `config[name]` returns the same section object as a
    declared ConfigSection descriptor.
    """
    self._write(['[a]', 'option = x', '[b]', 'option = y'])
    config = self._read()
    self.assertEquals(['a', 'b'], config.sections())
    class Foo(object):
        # enclose in parentheses to avoid messages extraction
        section_c = (ConfigSection)('c', 'Doc for c')
        option_c = Option('c', 'option', 'value')
    self.assertEquals(['a', 'b', 'c'], config.sections())
    foo = Foo()
    foo.config = config
    # assertTrue replaces the deprecated `assert_` alias.
    self.assertTrue(foo.section_c is config['c'])
    self.assertEquals('value', foo.section_c.get('option'))
def test_sections_unicode(self):
    """sections() handles non-ASCII names from file and declarations."""
    self._write([u'[aä]', u'öption = x', '[b]', 'option = y'])
    config = self._read()
    self.assertEquals([u'aä', 'b'], config.sections())
    class Foo(object):
        option_c = Option(u'cä', 'option', 'value')
    self.assertEquals([u'aä', 'b', u'cä'], config.sections())
def test_options(self):
    """options() yields (name, value) pairs, iterating a section yields
    names, empty sections raise StopIteration, and declared Options
    contribute their defaults."""
    self._write(['[a]', 'option = x', '[b]', 'option = y'])
    config = self._read()
    # assertEqual replaces the deprecated assertEquals alias
    self.assertEqual(('option', 'x'), iter(config.options('a')).next())
    self.assertEqual(('option', 'y'), iter(config.options('b')).next())
    self.assertRaises(StopIteration, iter(config.options('c')).next)
    self.assertEqual('option', iter(config['a']).next())
    self.assertEqual('option', iter(config['b']).next())
    self.assertRaises(StopIteration, iter(config['c']).next)

    class Foo(object):
        option_a = Option('a', 'b', 'c')

    self.assertEqual([('option', 'x'), ('b', 'c')],
                     list(config.options('a')))
def test_options_unicode(self):
    """Same as test_options but with unicode section and option names."""
    self._write([u'[ä]', u'öption = x', '[b]', 'option = y'])
    config = self._read()
    # assertEqual replaces the deprecated assertEquals alias
    self.assertEqual((u'öption', 'x'), iter(config.options(u'ä')).next())
    self.assertEqual(('option', 'y'), iter(config.options('b')).next())
    self.assertRaises(StopIteration, iter(config.options('c')).next)
    self.assertEqual(u'öption', iter(config['ä']).next())

    class Foo(object):
        option_a = Option(u'ä', u'öption2', 'c')

    self.assertEqual([(u'öption', 'x'), (u'öption2', 'c')],
                     list(config.options(u'ä')))
def test_has_option(self):
    """has_option() and the `in` operator reflect both file contents
    and declared Option defaults."""
    config = self._read()
    # assertEqual replaces the deprecated assertEquals alias; the strict
    # True/False comparison of the original is preserved
    self.assertEqual(False, config.has_option('a', 'option'))
    self.assertEqual(False, 'option' in config['a'])
    self._write(['[a]', 'option = x'])
    config = self._read()
    self.assertEqual(True, config.has_option('a', 'option'))
    self.assertEqual(True, 'option' in config['a'])

    class Foo(object):
        option_a = Option('a', 'option2', 'x2')

    self.assertEqual(True, config.has_option('a', 'option2'))
def test_has_option_unicode(self):
    """Same as test_has_option but with unicode names."""
    config = self._read()
    # assertEqual replaces the deprecated assertEquals alias
    self.assertEqual(False, config.has_option(u'ä', u'öption'))
    self.assertEqual(False, u'öption' in config[u'ä'])
    self._write([u'[ä]', u'öption = x'])
    config = self._read()
    self.assertEqual(True, config.has_option(u'ä', u'öption'))
    self.assertEqual(True, u'öption' in config[u'ä'])

    class Foo(object):
        option_a = Option(u'ä', u'öption2', 'x2')

    self.assertEqual(True, config.has_option(u'ä', u'öption2'))
def test_reparse(self):
    """parse_if_needed() picks up on-disk changes (mtime-based)."""
    self._write(['[a]', 'option = x'])
    config = self._read()
    # assertEqual replaces the deprecated assertEquals alias
    self.assertEqual('x', config.get('a', 'option'))
    time.sleep(2)  # needed because of low mtime granularity,
                   # especially on fat filesystems
    self._write(['[a]', 'option = y'])
    config.parse_if_needed()
    self.assertEqual('y', config.get('a', 'option'))
def test_inherit_one_level(self):
    """Values from a single inherited file are visible through the
    child configuration and cannot be removed through it."""
    def check_inherited():
        cfg = self._read()
        self.assertEqual('x', cfg.get('a', 'option'))
        self.assertEqual(['a', 'inherit'], cfg.sections())
        # Should *not* remove the option defined in the parent file
        cfg.remove('a', 'option')
        self.assertEqual('x', cfg.get('a', 'option'))
        self.assertEqual([('option', 'x')], list(cfg.options('a')))
        self.assertEqual(True, 'a' in cfg)
    self._test_with_inherit(check_inherited)
def test_inherit_multiple(self):
    # Two parent files listed in "[inherit] file = ..." are both merged
    # into the child configuration; for conflicting options the first
    # listed file wins, and relative getpath() values resolve against
    # the main configuration file's directory.
    class Foo(object):
        option_b = Option('b', 'option2', 'default')
    base = os.path.dirname(self.filename)
    relsite1 = os.path.join('sub1', 'trac-site1.ini')
    site1 = os.path.join(base, relsite1)
    relsite2 = os.path.join('sub2', 'trac-site2.ini')
    site2 = os.path.join(base, relsite2)
    os.mkdir(os.path.dirname(site1))
    create_file(site1, '[a]\noption1 = x\n'
                       '[c]\noption = 1\npath1 = site1\n')
    try:
        os.mkdir(os.path.dirname(site2))
        create_file(site2, '[b]\noption2 = y\n'
                           '[c]\noption = 2\npath2 = site2\n')
        try:
            self._write(['[inherit]',
                         'file = %s, %s' % (relsite1, relsite2)])
            config = self._read()
            self.assertEqual('x', config.get('a', 'option1'))
            self.assertEqual('y', config.get('b', 'option2'))
            # site1 is listed first, so its value for [c] option wins
            self.assertEqual('1', config.get('c', 'option'))
            self.assertEqual(os.path.join(base, 'site1'),
                             config.getpath('c', 'path1'))
            self.assertEqual(os.path.join(base, 'site2'),
                             config.getpath('c', 'path2'))
            # missing option without a default yields an empty path
            self.assertEqual('',
                             config.getpath('c', 'path3'))
            # a default value is resolved against the base directory too
            self.assertEqual(os.path.join(base, 'site4'),
                             config.getpath('c', 'path4', 'site4'))
        finally:
            # clean up the fixture files even when an assertion fails
            os.remove(site2)
            os.rmdir(os.path.dirname(site2))
    finally:
        os.remove(site1)
        os.rmdir(os.path.dirname(site1))
def _test_with_inherit(self, testcb):
    """Run *testcb* with a temporary parent 'trac-site.ini' in place.

    The parent file defines ``[a] option = x``; it is always deleted
    afterwards, even if the callback raises.
    """
    parent = os.path.join(tempfile.gettempdir(), 'trac-site.ini')
    try:
        parent_file = open(parent, 'w')
        try:
            parent_file.write('[a]\noption = x\n')
        finally:
            parent_file.close()
        self._write(['[inherit]', 'file = trac-site.ini'])
        testcb()
    finally:
        os.remove(parent)
def suite():
    # Collect all test* methods from ConfigurationTestCase.
    return unittest.makeSuite(ConfigurationTestCase, 'test')


if __name__ == '__main__':
    # Allow running this test module directly.
    unittest.main(defaultTest='suite')
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/tests/__init__.py | trac/trac/tests/__init__.py | import unittest
from trac.tests import attachment, config, core, env, perm, resource, \
wikisyntax, functional
def suite():
    """Aggregate the basic unit tests and the functional tests."""
    all_tests = unittest.TestSuite()
    for sub_suite in (basicSuite(), functionalSuite()):
        all_tests.addTest(sub_suite)
    return all_tests
def basicSuite():
    """Build the suite of fast, non-functional test modules."""
    all_tests = unittest.TestSuite()
    # Order matches the original explicit addTest() calls.
    for module in (attachment, config, core, env, perm, resource,
                   wikisyntax):
        all_tests.addTest(module.suite())
    return all_tests
def functionalSuite():
    # Thin wrapper: the functional package assembles its own suite.
    return functional.suite()


if __name__ == '__main__':
    unittest.main(defaultTest='suite')
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/tests/notification.py | trac/trac/tests/notification.py | # -*- coding: utf-8 -*-
#
# Copyright (C) 2005-2009 Edgewall Software
# Copyright (C) 2005-2006 Emmanuel Blot <emmanuel.blot@free.fr>
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://trac.edgewall.org/wiki/TracLicense.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://trac.edgewall.org/log/.
#
# Include a basic SMTP server, based on L. Smithson
# (lsmithson@open-networks.co.uk) extensible Python SMTP Server
#
# This file does not contain unit tests, but provides a set of
# classes to run SMTP notification tests
#
import socket
import string
import threading
import re
import base64
import quopri
# Line-ending characters used by the RFC822 parsing helpers below.
LF = '\n'
CR = '\r'
# Loose pattern extracting a bare email address from a full address.
email_re = re.compile(r"([\w\d_\.\-])+\@(([\w\d\-])+\.)+([\w\d]{2,4})+")
# RFC 2047 MIME-encoded header value: =?charset?q|b?value?=
header_re = re.compile(r'^=\?(?P<charset>[\w\d\-]+)\?(?P<code>[qb])\?(?P<value>.*)\?=$')
class SMTPServerInterface:
    """
    A base class for the implementation of an application specific SMTP
    server. Applications should subclass this and override these
    methods, which by default do nothing.

    A method is defined for each RFC821 command. For each of these
    methods, 'args' is the complete command received from the
    client. The 'data' method is called after all of the client DATA
    is received.

    If a method returns 'None', then a '250 OK' message is
    automatically sent to the client. If a subclass returns a non-null
    string then it is returned instead.
    """

    def helo(self, args):
        """Handle HELO; return a reply string, or None for '250 OK'."""
        return None

    def mail_from(self, args):
        """Handle MAIL FROM."""
        return None

    def rcpt_to(self, args):
        """Handle RCPT TO."""
        return None

    def data(self, args):
        """Handle the accumulated DATA payload."""
        return None

    def quit(self, args):
        """Handle QUIT."""
        return None

    def reset(self, args):
        """Handle RSET."""
        return None
#
# Some helper functions for manipulating from & to addresses etc.
#
def strip_address(address):
    """
    Strip the leading & trailing <> from an address.  Handy for
    getting FROM: addresses.

    Raises ValueError if either angle bracket is missing.
    """
    # str methods replace the long-deprecated string module functions
    # (string.index); this also works unchanged on Python 3.
    start = address.index('<') + 1
    end = address.index('>')
    return address[start:end]
def split_to(address):
    """
    Return 'address' as undressed (host, fulladdress) tuple.
    Handy for use with TO: addresses.

    Raises ValueError if '<', '@' or '>' is missing.
    """
    # str methods replace the long-deprecated string module functions
    # (string.index); this also works unchanged on Python 3.
    start = address.index('<') + 1
    sep = address.index('@') + 1
    end = address.index('>')
    return (address[sep:end], address[start:end])
#
# This drives the state for a single RFC821 message.
#
class SMTPServerEngine:
    """
    Server engine that calls methods on the SMTPServerInterface object
    passed at construction time. It is constructed with a bound socket
    connection to a client. The 'chug' method drives the state,
    returning when the client RFC821 transaction is complete.
    """

    # Protocol states: INIT -> HELO -> MAIL -> RCPT -> DATA, then back
    # to HELO after a complete message has been received.
    ST_INIT = 0
    ST_HELO = 1
    ST_MAIL = 2
    ST_RCPT = 3
    ST_DATA = 4
    ST_QUIT = 5

    def __init__(self, socket, impl):
        # impl: the SMTPServerInterface implementation receiving callbacks
        # socket: connected client socket (NOTE: parameter shadows the
        # socket module name inside this method)
        self.impl = impl
        self.socket = socket
        self.state = SMTPServerEngine.ST_INIT

    def chug(self):
        """
        Chug the engine, till QUIT is received from the client. As
        each RFC821 message is received, calls are made on the
        SMTPServerInterface methods on the object passed at
        construction time.
        """
        self.socket.send("220 Welcome to Trac notification test server\r\n")
        while 1:
            data = ''
            completeLine = 0
            # Make sure an entire line is received before handing off
            # to the state engine. Thanks to John Hall for pointing
            # this out.
            while not completeLine:
                try:
                    lump = self.socket.recv(1024)
                    if len(lump):
                        data += lump
                        if (len(data) >= 2) and data[-2:] == '\r\n':
                            completeLine = 1
                            if self.state != SMTPServerEngine.ST_DATA:
                                # regular command: reply, maybe close
                                rsp, keep = self.do_command(data)
                            else:
                                # message payload: do_data() returns None
                                # until the CRLF.CRLF terminator arrives
                                rsp = self.do_data(data)
                                if rsp == None:
                                    continue
                            self.socket.send(rsp + "\r\n")
                            if keep == 0:
                                # QUIT was processed: close and stop
                                self.socket.close()
                                return
                    else:
                        # EOF
                        return
                except socket.error:
                    return

    def do_command(self, data):
        """Process a single SMTP Command"""
        # Returns a (reply-string, keep-connection-flag) tuple.
        cmd = data[0:4]
        cmd = string.upper(cmd)
        keep = 1
        rv = None
        if cmd == "HELO":
            self.state = SMTPServerEngine.ST_HELO
            rv = self.impl.helo(data[5:])
        elif cmd == "RSET":
            rv = self.impl.reset(data[5:])
            self.data_accum = ""
            self.state = SMTPServerEngine.ST_INIT
        elif cmd == "NOOP":
            pass
        elif cmd == "QUIT":
            rv = self.impl.quit(data[5:])
            keep = 0
        elif cmd == "MAIL":
            # MAIL is only legal right after HELO
            if self.state != SMTPServerEngine.ST_HELO:
                return ("503 Bad command sequence", 1)
            self.state = SMTPServerEngine.ST_MAIL
            rv = self.impl.mail_from(data[5:])
        elif cmd == "RCPT":
            # RCPT may follow MAIL or another RCPT
            if (self.state != SMTPServerEngine.ST_MAIL) and \
                    (self.state != SMTPServerEngine.ST_RCPT):
                return ("503 Bad command sequence", 1)
            self.state = SMTPServerEngine.ST_RCPT
            rv = self.impl.rcpt_to(data[5:])
        elif cmd == "DATA":
            if self.state != SMTPServerEngine.ST_RCPT:
                return ("503 Bad command sequence", 1)
            self.state = SMTPServerEngine.ST_DATA
            self.data_accum = ""
            return ("354 OK, Enter data, terminated with a \\r\\n.\\r\\n", 1)
        else:
            return ("505 Eh? WTF was that?", 1)
        # a non-None callback result overrides the default '250 OK'
        if rv:
            return (rv, keep)
        else:
            return("250 OK", keep)

    def do_data(self, data):
        """
        Process SMTP Data. Accumulates client DATA until the
        terminator is found.
        """
        # Returns None while still accumulating, a reply string once
        # the '\r\n.\r\n' terminator has been seen.
        self.data_accum = self.data_accum + data
        if len(self.data_accum) > 4 and self.data_accum[-5:] == '\r\n.\r\n':
            # strip the terminator before handing the message over
            self.data_accum = self.data_accum[:-5]
            rv = self.impl.data(self.data_accum)
            self.state = SMTPServerEngine.ST_HELO
            if rv:
                return rv
            else:
                return "250 OK - Data and terminator. found"
        else:
            return None
class SMTPServer:
    """
    A single threaded SMTP Server connection manager. Listens for
    incoming SMTP connections on a given port. For each connection,
    the SMTPServerEngine is chugged, passing the given instance of
    SMTPServerInterface.
    """

    def __init__(self, port):
        # Bind immediately so the port is reserved at construction time.
        self._socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self._socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        self._socket.bind(("127.0.0.1", port))
        self._socket_service = None
        # Fix: previously only start() created this attribute, so calling
        # serve() before start() raised AttributeError instead of simply
        # not serving.
        self._resume = False

    def serve(self, impl):
        """Accept connections and run the engine until stop() is called.

        `impl` is the SMTPServerInterface receiving the callbacks.
        """
        while self._resume:
            try:
                nsd = self._socket.accept()
            except socket.error:
                return
            self._socket_service = nsd[0]
            engine = SMTPServerEngine(self._socket_service, impl)
            engine.chug()
            self._socket_service = None

    def start(self):
        """Start listening; must be called before serve()."""
        self._socket.listen(1)
        self._resume = True

    def stop(self):
        """Ask the serve() loop to exit after the current connection."""
        self._resume = False

    def terminate(self):
        """Force-close the service and listening sockets."""
        if self._socket_service:
            # force the blocking socket to stop waiting for data
            try:
                #self._socket_service.shutdown(2)
                self._socket_service.close()
            except AttributeError:
                # the SMTP server may also discard the socket
                pass
            self._socket_service = None
        if self._socket:
            #self._socket.shutdown(2)
            self._socket.close()
            self._socket = None
class SMTPServerStore(SMTPServerInterface):
    """
    Simple store for SMTP data
    """

    def __init__(self):
        self.reset(None)

    def helo(self, args):
        # a new HELO starts a fresh transaction
        self.reset(None)

    def mail_from(self, args):
        if args.lower().startswith('from:'):
            sender_spec = args[5:].replace('\r\n', '').strip()
            self.sender = strip_address(sender_spec)

    def rcpt_to(self, args):
        if args.lower().startswith('to:'):
            recipient_spec = args[3:].replace('\r\n', '').strip()
            self.recipients.append(strip_address(recipient_spec))

    def data(self, args):
        self.message = args

    def quit(self, args):
        pass

    def reset(self, args):
        # forget everything remembered about the current transaction
        self.sender = None
        self.recipients = []
        self.message = None
class SMTPThreadedServer(threading.Thread):
    """
    Run a SMTP server for a single connection, within a dedicated thread
    """

    def __init__(self, port):
        # port: TCP port to listen on (bound immediately by SMTPServer)
        self.port = port
        self.server = SMTPServer(port)
        self.store = SMTPServerStore()
        threading.Thread.__init__(self)

    def run(self):
        # run from within the SMTP server thread
        self.server.serve(impl = self.store)

    def start(self):
        # run from the main thread
        self.server.start()
        threading.Thread.start(self)

    def stop(self):
        # run from the main thread
        self.server.stop()
        # send a message to make the SMTP server quit gracefully
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        try:
            s.connect(('127.0.0.1', self.port))
            r = s.send("QUIT\r\n")
        except socket.error:
            pass
        s.close()
        # wait for the SMTP server to complete (for up to 2 secs)
        self.join(2.0)
        # clean up the SMTP server (and force quit if needed)
        self.server.terminate()

    def get_sender(self):
        # sender captured from the last MAIL FROM command
        return self.store.sender

    def get_recipients(self):
        # recipients captured from the RCPT TO commands
        return self.store.recipients

    def get_message(self):
        # raw message payload captured from the DATA command
        return self.store.message

    def cleanup(self):
        # forget any previously captured message
        self.store.reset(None)
def smtp_address(fulladdr):
    """Extract the bare email address from a full address.

    Falls back to the content of <...> brackets, then to the string
    itself.
    """
    mo = email_re.search(fulladdr)
    if mo:
        return mo.group(0)
    # Fix: 'start' was referenced without ever being assigned, raising
    # NameError whenever the regexp did not match.
    start = fulladdr.find('<')
    if start >= 0:
        return fulladdr[start+1:-1]
    return fulladdr
def decode_header(header):
    """ Decode a MIME-encoded header value """
    mo = header_re.match(header)
    # header does not seem to be MIME-encoded
    if not mo:
        return header
    # attempts to decode the header,
    # following the specified MIME encoding and charset
    try:
        encoding = mo.group('code').lower()
        if encoding == 'q':
            # quoted-printable ('Q') encoding
            val = quopri.decodestring(mo.group('value'), header=True)
        elif encoding == 'b':
            # base64 ('B') encoding
            val = base64.decodestring(mo.group('value'))
        else:
            raise AssertionError, "unsupported encoding: %s" % encoding
        header = unicode(val, mo.group('charset'))
    except Exception, e:
        # surface any decoding problem as a test failure
        raise AssertionError, e
    return header
def parse_smtp_message(msg):
""" Split a SMTP message into its headers and body.
Returns a (headers, body) tuple
We do not use the email/MIME Python facilities here
as they may accept invalid RFC822 data, or data we do not
want to support nor generate """
headers = {}
lh = None
body = None
# last line does not contain the final line ending
msg += '\r\n'
for line in msg.splitlines(True):
if body != None:
# append current line to the body
if line[-2] == CR:
body += line[0:-2]
body += '\n'
else:
raise AssertionError, "body misses CRLF: %s (0x%x)" \
% (line, ord(line[-1]))
else:
if line[-2] != CR:
# RFC822 requires CRLF at end of field line
raise AssertionError, "header field misses CRLF: %s (0x%x)" \
% (line, ord(line[-1]))
# discards CR
line = line[0:-2]
if line.strip() == '':
# end of headers, body starts
body = ''
else:
val = None
if line[0] in ' \t':
# continution of the previous line
if not lh:
# unexpected multiline
raise AssertionError, \
"unexpected folded line: %s" % line
val = decode_header(line.strip(' \t'))
# appends the current line to the previous one
if not isinstance(headers[lh], tuple):
headers[lh] += val
else:
headers[lh][-1] = headers[lh][-1] + val
else:
# splits header name from value
(h, v) = line.split(':', 1)
val = decode_header(v.strip())
if headers.has_key(h):
if isinstance(headers[h], tuple):
headers[h] += val
else:
headers[h] = (headers[h], val)
else:
headers[h] = val
# stores the last header (for multilines headers)
lh = h
# returns the headers and the message body
return (headers, body)
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/tests/contentgen.py | trac/trac/tests/contentgen.py | #!/usr/bin/python
import random
# Build the vocabulary used by the random content generators below:
# prefer the system word list, falling back to a tiny built-in list when
# it is unavailable (best-effort on purpose, hence the broad handler).
try:
    all_words = [x.strip() for x in open('/usr/share/dict/words').readlines() if x.strip().isalpha()]
except Exception:
    all_words = [
        'one',
        'two',
        'three',
        'four',
        'five',
        'six',
        'seven',
        'eight',
        'nine',
        'ten',
    ]
def random_word():
    """Return a random dictionary word, never in all-caps/CamelCase form."""
    picked = random.choice(all_words)
    # Do not return CamelCase words
    return picked.lower().capitalize() if picked[0].isupper() else picked
_random_unique_camels = []

def random_unique_camel():
    """Returns a unique camelcase word pair"""
    candidate = random_word().title() + random_word().title()
    # retry until we get a pair not handed out before
    while candidate in _random_unique_camels:
        candidate = random_word().title() + random_word().title()
    _random_unique_camels.append(candidate)
    return candidate
def random_sentence(word_count=None):
    """Return a sentence of *word_count* random words (1-20 by default)."""
    # `is None` replaces the non-idiomatic `== None` identity test
    if word_count is None:
        word_count = random.randint(1, 20)
    words = [random_word() for x in range(word_count)]
    return '%s.' % ' '.join(words)
def random_paragraph(sentence_count=None):
    """Return *sentence_count* random sentences (1-10 by default)."""
    # `is None` replaces the non-idiomatic `== None` identity test
    if sentence_count is None:
        sentence_count = random.randint(1, 10)
    sentences = [random_sentence(random.randint(2, 15))
                 for x in range(sentence_count)]
    return ' '.join(sentences)
def random_page(paragraph_count=None):
    """Return *paragraph_count* random paragraphs separated by CRLF blank
    lines (1-10 paragraphs by default)."""
    # `is None` replaces the non-idiomatic `== None` identity test
    if paragraph_count is None:
        paragraph_count = random.randint(1, 10)
    paragraphs = [random_paragraph(random.randint(1, 5))
                  for x in range(paragraph_count)]
    return '\r\n\r\n'.join(paragraphs)
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/tests/perm.py | trac/trac/tests/perm.py | from trac import perm
from trac.core import *
from trac.test import EnvironmentStub
import unittest
class DefaultPermissionStoreTestCase(unittest.TestCase):
    """Tests for the default, database-backed permission store,
    including (nested) group resolution."""

    def setUp(self):
        self.env = EnvironmentStub(enable=[perm.DefaultPermissionStore,
                                           perm.DefaultPermissionGroupProvider])
        self.store = perm.DefaultPermissionStore(self.env)

    def tearDown(self):
        self.env.reset_db()

    def test_simple_actions(self):
        """Directly granted actions are returned per user."""
        self.env.db_transaction.executemany(
            "INSERT INTO permission VALUES (%s,%s)",
            [('john', 'WIKI_MODIFY'),
             ('john', 'REPORT_ADMIN'),
             ('kate', 'TICKET_CREATE')])
        # assertEqual replaces the deprecated assertEquals alias
        self.assertEqual(['REPORT_ADMIN', 'WIKI_MODIFY'],
                         sorted(self.store.get_user_permissions('john')))
        self.assertEqual(['TICKET_CREATE'],
                         self.store.get_user_permissions('kate'))

    def test_simple_group(self):
        """Actions granted to a group apply to its members."""
        self.env.db_transaction.executemany(
            "INSERT INTO permission VALUES (%s,%s)",
            [('dev', 'WIKI_MODIFY'),
             ('dev', 'REPORT_ADMIN'),
             ('john', 'dev')])
        self.assertEqual(['REPORT_ADMIN', 'WIKI_MODIFY'],
                         sorted(self.store.get_user_permissions('john')))

    def test_nested_groups(self):
        """Group membership is resolved transitively."""
        self.env.db_transaction.executemany(
            "INSERT INTO permission VALUES (%s,%s)",
            [('dev', 'WIKI_MODIFY'),
             ('dev', 'REPORT_ADMIN'),
             ('admin', 'dev'),
             ('john', 'admin')])
        self.assertEqual(['REPORT_ADMIN', 'WIKI_MODIFY'],
                         sorted(self.store.get_user_permissions('john')))

    def test_mixed_case_group(self):
        """Group names keep their case through the resolution."""
        self.env.db_transaction.executemany(
            "INSERT INTO permission VALUES (%s,%s)",
            [('Dev', 'WIKI_MODIFY'),
             ('Dev', 'REPORT_ADMIN'),
             ('Admin', 'Dev'),
             ('john', 'Admin')])
        self.assertEqual(['REPORT_ADMIN', 'WIKI_MODIFY'],
                         sorted(self.store.get_user_permissions('john')))

    def test_builtin_groups(self):
        """'authenticated' and 'anonymous' act as implicit groups."""
        self.env.db_transaction.executemany(
            "INSERT INTO permission VALUES (%s,%s)",
            [('authenticated', 'WIKI_MODIFY'),
             ('authenticated', 'REPORT_ADMIN'),
             ('anonymous', 'TICKET_CREATE')])
        self.assertEqual(['REPORT_ADMIN', 'TICKET_CREATE', 'WIKI_MODIFY'],
                         sorted(self.store.get_user_permissions('john')))
        self.assertEqual(['TICKET_CREATE'],
                         self.store.get_user_permissions('anonymous'))

    def test_get_all_permissions(self):
        """get_all_permissions() returns the raw (subject, action) rows."""
        self.env.db_transaction.executemany(
            "INSERT INTO permission VALUES (%s,%s)",
            [('dev', 'WIKI_MODIFY'),
             ('dev', 'REPORT_ADMIN'),
             ('john', 'dev')])
        expected = [('dev', 'WIKI_MODIFY'),
                    ('dev', 'REPORT_ADMIN'),
                    ('john', 'dev')]
        for res in self.store.get_all_permissions():
            # assertFalse replaces the deprecated failIf alias
            self.assertFalse(res not in expected)
class TestPermissionRequestor(Component):
    # Test component declaring a small action set, including a duplicate
    # TEST_CREATE entry and TEST_ADMIN declared twice as a
    # meta-permission; the sibling PermissionSystemTestCase shows the
    # merged expansion (TEST_ADMIN -> CREATE, DELETE and MODIFY).
    implements(perm.IPermissionRequestor)

    def get_permission_actions(self):
        return ['TEST_CREATE', 'TEST_DELETE', 'TEST_MODIFY',
                ('TEST_CREATE', []),
                ('TEST_ADMIN', ['TEST_CREATE', 'TEST_DELETE']),
                ('TEST_ADMIN', ['TEST_MODIFY'])]
class PermissionSystemTestCase(unittest.TestCase):
    """Tests for PermissionSystem: grants, meta-permission expansion and
    enumeration."""

    def setUp(self):
        self.env = EnvironmentStub(enable=[perm.PermissionSystem,
                                           perm.DefaultPermissionStore,
                                           TestPermissionRequestor])
        self.perm = perm.PermissionSystem(self.env)

    def tearDown(self):
        self.env.reset_db()

    def test_all_permissions(self):
        """Without a username, every known action is reported."""
        self.assertEqual({'EMAIL_VIEW': True, 'TRAC_ADMIN': True,
                          'TEST_CREATE': True, 'TEST_DELETE': True,
                          'TEST_MODIFY': True, 'TEST_ADMIN': True},
                         self.perm.get_user_permissions())

    def test_simple_permissions(self):
        """Each user only sees the actions granted to them."""
        self.perm.grant_permission('bob', 'TEST_CREATE')
        self.perm.grant_permission('jane', 'TEST_DELETE')
        self.perm.grant_permission('jane', 'TEST_MODIFY')
        self.assertEqual({'TEST_CREATE': True},
                         self.perm.get_user_permissions('bob'))
        self.assertEqual({'TEST_DELETE': True, 'TEST_MODIFY': True},
                         self.perm.get_user_permissions('jane'))

    def test_meta_permissions(self):
        """Granting TEST_ADMIN implies all actions it was declared with."""
        self.perm.grant_permission('bob', 'TEST_CREATE')
        self.perm.grant_permission('jane', 'TEST_ADMIN')
        self.assertEqual({'TEST_CREATE': True},
                         self.perm.get_user_permissions('bob'))
        self.assertEqual({'TEST_CREATE': True, 'TEST_DELETE': True,
                          'TEST_MODIFY': True, 'TEST_ADMIN': True},
                         self.perm.get_user_permissions('jane'))

    def test_get_all_permissions(self):
        """get_all_permissions() reports the raw grants."""
        self.perm.grant_permission('bob', 'TEST_CREATE')
        self.perm.grant_permission('jane', 'TEST_ADMIN')
        expected = [('bob', 'TEST_CREATE'),
                    ('jane', 'TEST_ADMIN')]
        for res in self.perm.get_all_permissions():
            # assertFalse replaces the deprecated failIf alias
            self.assertFalse(res not in expected)

    def test_expand_actions_iter_7467(self):
        # Check that expand_actions works with iterators (#7467)
        perms = set(['EMAIL_VIEW', 'TRAC_ADMIN', 'TEST_DELETE', 'TEST_MODIFY',
                     'TEST_CREATE', 'TEST_ADMIN'])
        self.assertEqual(perms, self.perm.expand_actions(['TRAC_ADMIN']))
        self.assertEqual(perms, self.perm.expand_actions(iter(['TRAC_ADMIN'])))
class PermissionCacheTestCase(unittest.TestCase):
    """Tests for PermissionCache, the per-request view of a user's
    permissions (including its caching behavior)."""

    def setUp(self):
        self.env = EnvironmentStub(enable=[perm.DefaultPermissionStore,
                                           perm.DefaultPermissionPolicy,
                                           TestPermissionRequestor])
        self.perm_system = perm.PermissionSystem(self.env)
        # by-pass DefaultPermissionPolicy cache:
        perm.DefaultPermissionPolicy.CACHE_EXPIRY = -1
        self.perm_system.grant_permission('testuser', 'TEST_MODIFY')
        self.perm_system.grant_permission('testuser', 'TEST_ADMIN')
        self.perm = perm.PermissionCache(self.env, 'testuser')

    def tearDown(self):
        self.env.reset_db()

    def test_contains(self):
        # `action in cache` membership form
        self.assertEqual(True, 'TEST_MODIFY' in self.perm)
        self.assertEqual(True, 'TEST_ADMIN' in self.perm)
        self.assertEqual(False, 'TRAC_ADMIN' in self.perm)

    def test_has_permission(self):
        # explicit has_permission() form
        self.assertEqual(True, self.perm.has_permission('TEST_MODIFY'))
        self.assertEqual(True, self.perm.has_permission('TEST_ADMIN'))
        self.assertEqual(False, self.perm.has_permission('TRAC_ADMIN'))

    def test_require(self):
        # require() raises PermissionError for actions the user lacks
        self.perm.require('TEST_MODIFY')
        self.perm.require('TEST_ADMIN')
        self.assertRaises(perm.PermissionError, self.perm.require, 'TRAC_ADMIN')

    def test_assert_permission(self):
        # legacy spelling of require()
        self.perm.assert_permission('TEST_MODIFY')
        self.perm.assert_permission('TEST_ADMIN')
        self.assertRaises(perm.PermissionError,
                          self.perm.assert_permission, 'TRAC_ADMIN')

    def test_cache(self):
        # a positive decision stays cached after the grant is revoked
        self.perm.assert_permission('TEST_MODIFY')
        self.perm.assert_permission('TEST_ADMIN')
        self.perm_system.revoke_permission('testuser', 'TEST_ADMIN')
        # Using cached GRANT here
        self.perm.assert_permission('TEST_ADMIN')

    def test_cache_shared(self):
        # we need to start with an empty cache here (#7201)
        perm1 = perm.PermissionCache(self.env, 'testcache')
        perm1 = perm1('ticket', 1)
        perm2 = perm1('ticket', 1) # share internal cache
        self.perm_system.grant_permission('testcache', 'TEST_ADMIN')
        perm1.assert_permission('TEST_ADMIN')
        self.perm_system.revoke_permission('testcache', 'TEST_ADMIN')
        # Using cached GRANT here (from shared cache)
        perm2.assert_permission('TEST_ADMIN')
class TestPermissionPolicy(Component):
    """Scriptable permission policy that records every check it makes."""
    implements(perm.IPermissionPolicy)

    def __init__(self):
        self.allowed = {}  # username -> set of granted actions
        self.results = {}  # (username, action) -> last check result

    def grant(self, username, permissions):
        granted = self.allowed.setdefault(username, set())
        granted.update(permissions)

    def revoke(self, username, permissions):
        granted = self.allowed.setdefault(username, set())
        granted.difference_update(permissions)

    def check_permission(self, action, username, resource, perm):
        # True when granted, None (no opinion) otherwise
        result = True if action in self.allowed.get(username, set()) else None
        self.results[(username, action)] = result
        return result
class PermissionPolicyTestCase(unittest.TestCase):
    """Tests for custom IPermissionPolicy plugins and policy chaining."""

    def setUp(self):
        self.env = EnvironmentStub(enable=[perm.DefaultPermissionStore,
                                           perm.DefaultPermissionPolicy,
                                           TestPermissionPolicy,
                                           TestPermissionRequestor])
        # make the test policy the only active one
        self.env.config.set('trac', 'permission_policies', 'TestPermissionPolicy')
        self.policy = TestPermissionPolicy(self.env)
        self.perm = perm.PermissionCache(self.env, 'testuser')

    def tearDown(self):
        self.env.reset_db()

    def test_no_permissions(self):
        # policy returns None (no opinion) -> permission denied
        self.assertRaises(perm.PermissionError,
                          self.perm.assert_permission, 'TEST_MODIFY')
        self.assertRaises(perm.PermissionError,
                          self.perm.assert_permission, 'TEST_ADMIN')
        self.assertEqual(self.policy.results,
                         {('testuser', 'TEST_MODIFY'): None,
                          ('testuser', 'TEST_ADMIN'): None})

    def test_grant_revoke_permissions(self):
        # actions granted through the policy become visible to the cache
        self.policy.grant('testuser', ['TEST_MODIFY', 'TEST_ADMIN'])
        self.assertEqual('TEST_MODIFY' in self.perm, True)
        self.assertEqual('TEST_ADMIN' in self.perm, True)
        self.assertEqual(self.policy.results,
                         {('testuser', 'TEST_MODIFY'): True,
                          ('testuser', 'TEST_ADMIN'): True})

    def test_policy_chaining(self):
        # when the first policy abstains (None), the next one decides
        self.env.config.set('trac', 'permission_policies', 'TestPermissionPolicy,DefaultPermissionPolicy')
        self.policy.grant('testuser', ['TEST_MODIFY'])
        system = perm.PermissionSystem(self.env)
        system.grant_permission('testuser', 'TEST_ADMIN')
        self.assertEqual(list(system.policies),
                         [self.policy,
                          perm.DefaultPermissionPolicy(self.env)])
        self.assertEqual('TEST_MODIFY' in self.perm, True)
        self.assertEqual('TEST_ADMIN' in self.perm, True)
        self.assertEqual(self.policy.results,
                         {('testuser', 'TEST_MODIFY'): True,
                          ('testuser', 'TEST_ADMIN'): None})
def suite():
    """Aggregate all permission test cases into one suite."""
    combined = unittest.TestSuite()
    for testcase in (DefaultPermissionStoreTestCase,
                     PermissionSystemTestCase,
                     PermissionCacheTestCase,
                     PermissionPolicyTestCase):
        combined.addTest(unittest.makeSuite(testcase, 'test'))
    return combined
if __name__ == '__main__':
    # Allow running this test module directly.
    unittest.main()
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/tests/env.py | trac/trac/tests/env.py | from __future__ import with_statement
from trac import db_default
from trac.env import Environment
import os.path
import unittest
import tempfile
import shutil
class EnvironmentTestCase(unittest.TestCase):
    """Tests against a real (on-disk, temporary) Trac environment."""

    def setUp(self):
        env_path = os.path.join(tempfile.gettempdir(), 'trac-tempenv')
        self.addCleanup(self.cleanupEnvPath, env_path)
        self.env = Environment(env_path, create=True)

    def tearDown(self):
        with self.env.db_query as db:
            db.close()
        self.env.shutdown()  # really closes the db connections
        shutil.rmtree(self.env.path)

    def cleanupEnvPath(self, path):
        # remove leftovers, e.g. from a previously crashed run
        if os.path.exists(path):
            shutil.rmtree(path)

    def test_get_version(self):
        """Testing env.get_version"""
        # unittest assertion instead of a bare `assert`: gives a useful
        # failure message and is not stripped under `python -O`
        self.assertEqual(db_default.db_version, self.env.get_version())

    def test_get_known_users(self):
        """Testing env.get_known_users"""
        with self.env.db_transaction as db:
            db.executemany("INSERT INTO session VALUES (%s,%s,0)",
                [('123', 0), ('tom', 1), ('joe', 1), ('jane', 1)])
            db.executemany("INSERT INTO session_attribute VALUES (%s,%s,%s,%s)",
                [('123', 0, 'email', 'a@example.com'),
                 ('tom', 1, 'name', 'Tom'),
                 ('tom', 1, 'email', 'tom@example.com'),
                 ('joe', 1, 'email', 'joe@example.com'),
                 ('jane', 1, 'name', 'Jane')])
        users = {}
        for username, name, email in self.env.get_known_users():
            users[username] = (name, email)
        # `in` replaces the deprecated dict.has_key(); unauthenticated
        # sessions (auth flag 0) must not be reported
        self.assertFalse('anonymous' in users)
        self.assertEqual(('Tom', 'tom@example.com'), users['tom'])
        self.assertEqual((None, 'joe@example.com'), users['joe'])
        self.assertEqual(('Jane', None), users['jane'])
def suite():
    # Collect all test* methods from EnvironmentTestCase.
    return unittest.makeSuite(EnvironmentTestCase,'test')


if __name__ == '__main__':
    # Allow running this test module directly.
    unittest.main(defaultTest='suite')
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/tests/functional/better_twill.py | trac/trac/tests/functional/better_twill.py | #!/usr/bin/python
"""better_twill is a small wrapper around twill to set some sane defaults and
monkey-patch some better versions of some of twill's methods.
It also handles twill's absense.
"""
import os
from os.path import abspath, dirname, join
import sys
from pkg_resources import parse_version as pv
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
# On OSX lxml needs to be imported before twill to avoid Resolver issues
# somehow caused by the mac specific 'ic' module
try:
from lxml import etree
except ImportError:
pass
try:
import twill
except ImportError:
twill = None
# When twill tries to connect to a site before the site is up, it raises an
# exception. In 0.9b1, it's urlib2.URLError, but in -latest, it's
# twill.browser.BrowserStateError.
try:
from twill.browser import BrowserStateError as ConnectError
except ImportError:
from urllib2 import URLError as ConnectError
if twill:
# We want Trac to generate valid html, and therefore want to test against
# the html as generated by Trac. "tidy" tries to clean up broken html, and
# is responsible for one difficult to track down testcase failure (for
# #5497). Therefore we turn it off here.
twill.commands.config('use_tidy', '0')
# We use a transparent proxy to access the global browser object through
# twill.get_browser(), as the browser can be destroyed by browser_reset()
# (see #7472).
class _BrowserProxy(object):
def __getattribute__(self, name):
return getattr(twill.get_browser(), name)
def __setattr__(self, name, value):
setattr(twill.get_browser(), name, value)
# setup short names to reduce typing
# This twill browser (and the tc commands that use it) are essentially
# global, and not tied to our test fixture.
tc = twill.commands
b = _BrowserProxy()
# Setup XHTML validation for all retrieved pages
try:
from lxml import etree
except ImportError:
print "SKIP: validation of XHTML output in functional tests " \
"(no lxml installed)"
etree = None
if etree and pv(etree.__version__) < pv('2.0.0'):
# 2.0.7 and 2.1.x are known to work.
print "SKIP: validation of XHTML output in functional tests " \
"(lxml < 2.0, api incompatibility)"
etree = None
if etree:
class _Resolver(etree.Resolver):
base_dir = dirname(abspath(__file__))
def resolve(self, system_url, public_id, context):
return self.resolve_filename(join(self.base_dir,
system_url.split("/")[-1]),
context)
_parser = etree.XMLParser(dtd_validation=True)
_parser.resolvers.add(_Resolver())
etree.set_default_parser(_parser)
def _format_error_log(data, log):
msg = []
for entry in log:
context = data.splitlines()[max(0, entry.line - 5):
entry.line + 6]
msg.append("\n# %s\n# URL: %s\n# Line %d, column %d\n\n%s\n"
% (entry.message, entry.filename,
entry.line, entry.column,
"\n".join([each.decode('utf-8') for each in context])))
return "\n".join(msg).encode('ascii', 'xmlcharrefreplace')
def _validate_xhtml(func_name, *args, **kwargs):
page = b.get_html()
if "xhtml1-strict.dtd" not in page:
return
etree.clear_error_log()
try:
# lxml will try to convert the URL to unicode by itself,
# this won't work for non-ascii URLs, so help him
url = b.get_url()
if isinstance(url, str):
url = unicode(url, 'latin1')
etree.parse(StringIO(page), base_url=url)
except etree.XMLSyntaxError, e:
raise twill.errors.TwillAssertionError(
_format_error_log(page, e.error_log))
b._post_load_hooks.append(_validate_xhtml)
# When we can't find something we expected, or find something we didn't
# expect, it helps the debugging effort to have a copy of the html to
# analyze.
def twill_write_html():
    """Write the current html to a file. Name the file based on the
    current testcase.

    Walks up the interpreter stack looking for the frame of a running
    testcase (a runTest/setUp/tearDown method) so the dump can be named
    after the testcase class and placed in that test environment's log
    directory.  Returns the name of the file written.
    """
    frame = sys._getframe()
    while frame:
        if frame.f_code.co_name in ('runTest', 'setUp', 'tearDown'):
            testcase = frame.f_locals['self']
            testname = testcase.__class__.__name__
            tracdir = testcase._testenv.tracdir
            break
        frame = frame.f_back
    else:
        # We didn't find a testcase in the stack, so we have no clue what's
        # going on.
        raise Exception("No testcase was found on the stack. This was "
            "really not expected, and I don't know how to handle it.")
    filename = os.path.join(tracdir, 'log', "%s.html" % testname)
    # Close the handle even if the write fails, so the dump is flushed
    # to disk and the file is not left locked (which would prevent its
    # deletion on Windows).
    html_file = open(filename, 'w')
    try:
        html_file.write(b.get_html())
    finally:
        html_file.close()
    return filename
# Twill isn't as helpful with errors as I'd like it to be, so we replace
# the formvalue function. This would be better done as a patch to Twill.
def better_formvalue(form, field, value, fv=tc.formvalue):
try:
fv(form, field, value)
except (twill.errors.TwillAssertionError,
twill.utils.ClientForm.ItemNotFoundError), e:
filename = twill_write_html()
args = e.args + (filename,)
raise twill.errors.TwillAssertionError(*args)
tc.formvalue = better_formvalue
tc.fv = better_formvalue
# Twill requires that on pages with more than one form, you have to click a
# field within the form before you can click submit. There are a number of
# cases where the first interaction a user would have with a form is
# clicking on a button. This enhancement allows us to specify the form to
# click on.
def better_browser_submit(fieldname=None, formname=None, browser=b,
                          old_submit=b.submit):
    """Submit a form, optionally selecting the form by name first.

    Stock twill requires clicking a field inside a form before submit()
    works on pages with several forms; passing ``formname`` lets the
    caller pick the form directly instead.
    """
    if formname is not None:
        # enhancement to directly specify the form
        browser._browser.form = browser.get_form(formname)
    old_submit(fieldname)
b.submit = better_browser_submit
def better_submit(fieldname=None, formname=None):
    """tc-level wrapper forwarding to the enhanced browser submit."""
    b.submit(fieldname=fieldname, formname=formname)
tc.submit = better_submit
# Twill's formfile function leaves a filehandle open which prevents the
# file from being deleted on Windows. Since we would just assume use a
# StringIO object in the first place, allow the file-like object to be
# provided directly.
def better_formfile(formname, fieldname, filename, content_type=None,
                    fp=None):
    """Attach a file to an upload field, from a path or a file object.

    Twill's own formfile leaves a filehandle open, which prevents the
    file from being deleted on Windows.  Here the file is read eagerly
    and wrapped in a StringIO; alternatively a ready-made file-like
    object can be passed via `fp`, in which case `filename` is only used
    as the reported upload name.
    """
    if not fp:
        # Normalize to the platform path separator before opening.
        filename = filename.replace('/', os.path.sep)
        temp_fp = open(filename, 'rb')
        data = temp_fp.read()
        temp_fp.close()
        fp = StringIO(data)
    form = b.get_form(formname)
    control = b.get_form_field(form, fieldname)
    if not control.is_of_kind('file'):
        raise twill.errors.TwillException('ERROR: field is not a file '
                                          'upload field!')
    b.clicked(form, control)
    control.add_file(fp, content_type, filename)
tc.formfile = better_formfile
# Twill's tc.find() does not provide any guidance on what we got instead of
# what was expected.
def better_find(what, flags='', tcfind=tc.find):
try:
tcfind(what, flags)
except twill.errors.TwillAssertionError, e:
filename = twill_write_html()
args = e.args + (filename,)
raise twill.errors.TwillAssertionError(*args)
tc.find = better_find
def better_notfind(what, flags='', tcnotfind=tc.notfind):
try:
tcnotfind(what, flags)
except twill.errors.TwillAssertionError, e:
filename = twill_write_html()
args = e.args + (filename,)
raise twill.errors.TwillAssertionError(*args)
tc.notfind = better_notfind
# Same for tc.url - no hint about what went wrong!
def better_url(should_be, tcurl=tc.url):
try:
tcurl(should_be)
except twill.errors.TwillAssertionError, e:
filename = twill_write_html()
args = e.args + (filename,)
raise twill.errors.TwillAssertionError(*args)
tc.url = better_url
else:
b = tc = None
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/tests/functional/compat.py | trac/trac/tests/functional/compat.py | #!/usr/bin/python
import os
import shutil
from trac.util.compat import close_fds
# On Windows, shutil.rmtree doesn't remove files with the read-only
# attribute set, so this function explicitly removes it on every error
# before retrying. Even on Linux, shutil.rmtree chokes on read-only
# directories, so we use this version in all cases.
# Fix from http://bitten.edgewall.org/changeset/521
def rmtree(root):
    """Remove the directory tree `root`, retrying on read-only entries.

    shutil.rmtree() fails on files with the read-only attribute set on
    Windows (and on read-only directories elsewhere), so on error the
    offending path is made writable and the failing operation is retried
    once; if it fails again the exception propagates normally.
    """
    def _handle_error(fn, path, excinfo):
        # 438 == octal 666 (rw for everyone); spelled as a plain int so
        # the literal is valid octal syntax under both Python 2 and 3.
        os.chmod(path, 438)
        fn(path)
    return shutil.rmtree(root, onerror=_handle_error)
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/tests/functional/testenv.py | trac/trac/tests/functional/testenv.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
"""Object for creating and destroying a Trac environment for testing purposes.
Provides some Trac environment-wide utility functions, and a way to call
:command:`trac-admin` without it being on the path."""
import os
import time
import signal
import sys
import errno
import locale
from subprocess import call, Popen, PIPE, STDOUT
from trac.env import open_environment
from trac.test import EnvironmentStub, get_dburi
from trac.tests.functional.compat import rmtree
from trac.tests.functional import logfile
from trac.tests.functional.better_twill import tc, ConnectError
from trac.util.compat import close_fds
# TODO: refactor to support testing multiple frontends, backends
# (and maybe repositories and authentication).
#
# Frontends::
# tracd, ap2+mod_python, ap2+mod_wsgi, ap2+mod_fastcgi, ap2+cgi,
# lighty+fastcgi, lighty+cgi, cherrypy+wsgi
#
# Backends::
# sqlite3+pysqlite2, postgres+psycopg2 python bindings,
# mysql+mysqldb with server v4, mysql+mysqldb with server v5
# (those need to test search escaping, among many other things like long
# paths in browser and unicode chars being allowed/translating...)
class FunctionalTestEnvironment(object):
"""Common location for convenience functions that work with the test
environment on Trac. Subclass this and override some methods if you are
using a different :term:`VCS`.
:class:`FunctionalTestEnvironment` requires a `dirname` in which
the test repository and Trac environment will be created, `port`
for the :command:`tracd` webserver to run on, and the `url` which
can access this (usually ``localhost``).
"""
    def __init__(self, dirname, port, url):
        """Create a :class:`FunctionalTestEnvironment`, see the class itself
        for parameter information.

        Any leftover environment at `dirname` from a previous run is torn
        down before a fresh one is created.  The locale is switched to the
        user's default so locale-dependent behaviour gets exercised.
        """
        self.url = url
        # trac-admin and helper scripts are run from the parent directory
        self.command_cwd = os.path.normpath(os.path.join(dirname, '..'))
        self.dirname = os.path.abspath(dirname)
        self.tracdir = os.path.join(self.dirname, "trac")
        self.htpasswd = os.path.join(self.dirname, "htpasswd")
        self.port = port
        self.pid = None  # pid of the tracd server process, set by start()
        self.init()
        self.destroy()
        time.sleep(0.1) # Avoid race condition on Windows
        self.create()
        locale.setlocale(locale.LC_ALL, '')
trac_src = '.'
    @property
    def dburi(self):
        """Database connection URI for the environment under test.

        Taken from the test configuration, except that an in-memory
        SQLite URI is rewritten to an on-disk database, since a separate
        tracd process cannot share an in-memory database.
        """
        dburi = get_dburi()
        if dburi == 'sqlite::memory:':
            # functional tests obviously can't work with the in-memory database
            dburi = 'sqlite:db/trac.db'
        return dburi
def destroy(self):
"""Remove all of the test environment data."""
env = EnvironmentStub(path=self.tracdir, destroying=True)
env.destroy_db()
env.shutdown()
self.destroy_repo()
if os.path.exists(self.dirname):
rmtree(self.dirname)
repotype = 'svn'
def init(self):
""" Hook for modifying settings or class attributes before
any methods are called. """
pass
def create_repo(self):
"""Hook for creating the repository."""
# The default test environment does not include a source repo
def destroy_repo(self):
"""Hook for removing the repository."""
# The default test environment does not include a source repo
def post_create(self, env):
"""Hook for modifying the environment after creation. For example, to
set configuration like::
def post_create(self, env):
env.config.set('git', 'path', '/usr/bin/git')
env.config.save()
"""
pass
def get_enabled_components(self):
"""Return a list of components that should be enabled after
environment creation. For anything more complicated, use the
:meth:`post_create` method.
"""
return ['tracopt.versioncontrol.svn.*']
    def create(self):
        """Create a new test environment.
        This sets up Trac, calls :meth:`create_repo` and sets up
        authentication.

        Creates the 'admin' account (granted TRAC_ADMIN) and a 'user'
        account, each with a password equal to the username, switches
        Trac logging to a file, enables the components listed by
        :meth:`get_enabled_components`, and finally lets subclasses
        adjust the environment through :meth:`post_create`.
        """
        os.mkdir(self.dirname)
        self.create_repo()
        self._tracadmin('initenv', self.tracdir, self.dburi, self.repotype,
                        self.repo_path_for_initenv())
        # htpasswd.py -c creates the password file with the admin account
        if call([sys.executable,
                 os.path.join(self.trac_src, 'contrib', 'htpasswd.py'), "-c",
                 "-b", self.htpasswd, "admin", "admin"], close_fds=close_fds,
                cwd=self.command_cwd):
            raise Exception('Unable to setup admin password')
        self.adduser('user')
        self._tracadmin('permission', 'add', 'admin', 'TRAC_ADMIN')
        # Setup Trac logging
        env = self.get_trac_environment()
        env.config.set('logging', 'log_type', 'file')
        for component in self.get_enabled_components():
            env.config.set('components', component, 'enabled')
        env.config.save()
        self.post_create(env)
def adduser(self, user):
"""Add a user to the environment. The password will be set to the
same as username."""
if call([sys.executable, os.path.join(self.trac_src, 'contrib',
'htpasswd.py'), '-b', self.htpasswd,
user, user], close_fds=close_fds, cwd=self.command_cwd):
raise Exception('Unable to setup password for user "%s"' % user)
    def _tracadmin(self, *args):
        """Internal utility method for calling trac-admin.

        Runs trac-admin in a subprocess against :attr:`tracdir`, merging
        stdout and stderr.  On a non-zero exit code the captured output
        is echoed and appended to the shared test logfile to help
        diagnose the failure, then an exception is raised.
        """
        proc = Popen([sys.executable, os.path.join(self.trac_src, 'trac',
                      'admin', 'console.py'), self.tracdir]
                     + list(args), stdout=PIPE, stderr=STDOUT,
                     close_fds=close_fds, cwd=self.command_cwd)
        out = proc.communicate()[0]
        if proc.returncode:
            print(out)
            logfile.write(out)
            raise Exception('Failed with exitcode %s running trac-admin ' \
                            'with %r' % (proc.returncode, args))
    def start(self):
        """Starts the webserver, and waits for it to come up.

        Honors two environment variables: FIGLEAF (an alternative
        interpreter/driver to run tracd under, e.g. for coverage) and
        TRAC_TEST_TRACD_OPTIONS (extra tracd command-line options).
        Polls the server URL for up to 30 seconds before giving up.
        """
        if 'FIGLEAF' in os.environ:
            exe = os.environ['FIGLEAF']
            if ' ' in exe: # e.g. 'coverage run'
                args = exe.split()
            else:
                args = [exe]
        else:
            args = [sys.executable]
        # -s: no subdirectory listing; basic auth against our htpasswd file
        options = ["--port=%s" % self.port, "-s", "--hostname=127.0.0.1",
                   "--basic-auth=trac,%s," % self.htpasswd]
        if 'TRAC_TEST_TRACD_OPTIONS' in os.environ:
            options += os.environ['TRAC_TEST_TRACD_OPTIONS'].split()
        args.append(os.path.join(self.trac_src, 'trac', 'web',
                                 'standalone.py'))
        server = Popen(args + options + [self.tracdir],
                       stdout=logfile, stderr=logfile,
                       close_fds=close_fds,
                       cwd=self.command_cwd,
                       )
        self.pid = server.pid
        # Verify that the url is ok
        timeout = 30
        while timeout:
            try:
                tc.go(self.url)
                break
            except ConnectError:
                time.sleep(1)
                timeout -= 1
        else:
            raise Exception('Timed out waiting for server to start.')
        tc.url(self.url)
    def stop(self):
        """Stops the webserver, if running
        FIXME: probably needs a nicer way to exit for coverage to work

        On Windows the process is force-killed via taskkill; elsewhere
        SIGTERM is sent and the child is reaped, ignoring the case where
        it has already exited (ESRCH).
        """
        if self.pid:
            if os.name == 'nt':
                # Untested
                res = call(["taskkill", "/f", "/pid", str(self.pid)],
                           stdin=PIPE, stdout=PIPE, stderr=PIPE)
            else:
                os.kill(self.pid, signal.SIGTERM)
                try:
                    os.waitpid(self.pid, 0)
                except OSError, e:
                    if e.errno != errno.ESRCH:
                        raise
def restart(self):
"""Restarts the webserver"""
self.stop()
self.start()
def get_trac_environment(self):
"""Returns a Trac environment object"""
return open_environment(self.tracdir, use_cache=True)
def repo_path_for_initenv(self):
"""Default to no repository"""
return "''" # needed for Python 2.3 and 2.4 on win32
    def call_in_workdir(self, args, environ=None):
        """Run `args` in the working-copy directory and return its stdout.

        stderr goes to the shared test logfile; raises an exception if
        the command exits with a non-zero status.
        """
        proc = Popen(args, stdout=PIPE, stderr=logfile,
                     close_fds=close_fds, cwd=self.work_dir(), env=environ)
        (data, _) = proc.communicate()
        if proc.wait():
            raise Exception('Unable to run command %s in %s' %
                            (args, self.work_dir()))
        # Preserve the command's output in the test log as well.
        logfile.write(data)
        return data
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/tests/functional/svntestenv.py | trac/trac/tests/functional/svntestenv.py | import os
import re
from subprocess import call
from testenv import FunctionalTestEnvironment
from trac.tests.functional.compat import close_fds
from trac.tests.functional import logfile
class SvnFunctionalTestEnvironment(FunctionalTestEnvironment):
def work_dir(self):
return os.path.join(self.dirname, 'workdir')
def repo_path_for_initenv(self):
return os.path.join(self.dirname, 'repo')
def create_repo(self):
"""
Initialize a repo of the type :attr:`self.repotype`.
"""
if call(["svnadmin", "create", self.repo_path_for_initenv()],
stdout=logfile, stderr=logfile, close_fds=close_fds):
raise Exception('unable to create subversion repository')
if call(['svn', 'co', self.repo_url(), self.work_dir()], stdout=logfile,
stderr=logfile, close_fds=close_fds):
raise Exception('Checkout from %s failed.' % self.repo_url())
def destroy_repo(self):
"""The deletion of the testenvironment will remove the repo as well."""
pass
    def repo_url(self):
        """Returns the url of the Subversion repository for this test
        environment.

        Builds a file:// URL; on Windows the drive-letter path needs an
        extra slash and forward slashes instead of backslashes.
        """
        repodir = self.repo_path_for_initenv()
        if os.name == 'nt':
            return 'file:///' + repodir.replace("\\", "/")
        else:
            return 'file://' + repodir
def svn_mkdir(self, paths, msg, username='admin'):
"""Subversion helper to create a new directory within the main
repository. Operates directly on the repository url, so a working
copy need not exist.
Example::
self._testenv.svn_mkdir(["abc", "def"], "Add dirs")
"""
self.call_in_workdir(['svn', '--username=%s' % username, 'mkdir', '-m', msg]
+ [self.repo_url() + '/' + d for d in paths])
self.call_in_workdir(['svn', 'update'])
    def svn_add(self, filename, data):
        """Subversion helper to add a file to the given path within the main
        repository.  Returns the revision number of the resulting commit.

        Example::
            self._testenv.svn_add("root.txt", "Hello World")
        """
        f = open(os.path.join(self.work_dir(), filename), 'w')
        f.write(data)
        f.close()
        self.call_in_workdir(['svn', 'add', filename])
        environ = os.environ.copy()
        environ['LC_ALL'] = 'C' # Force English messages in svn
        output = self.call_in_workdir(['svn', '--username=admin', 'commit', '-m',
                                       'Add %s' % filename, filename], environ=environ)
        try:
            # The revision is parsed from the English commit message,
            # hence the LC_ALL=C above.
            revision = re.search(r'Committed revision ([0-9]+)\.',
                                 output).group(1)
        except Exception, e:
            # Most likely the regex did not match; attach svn's output
            # to the exception to ease debugging.
            args = e.args + (output, )
            raise Exception(*args)
        return int(revision)
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/tests/functional/tester.py | trac/trac/tests/functional/tester.py | #!/usr/bin/python
"""The :class:`FunctionalTester` object provides a higher-level interface to
working with a Trac environment to make test cases more succinct.
"""
from trac.tests.functional import internal_error
from trac.tests.functional.better_twill import tc, b
from trac.tests.contentgen import random_page, random_sentence, random_word, \
random_unique_camel
from trac.util.text import unicode_quote
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
class FunctionalTester(object):
"""Provides a library of higher-level operations for interacting with a
test environment.
It makes assumptions such as knowing what ticket number is next, so
avoid doing things manually in a :class:`FunctionalTestCase` when you can.
"""
def __init__(self, url):
"""Create a :class:`FunctionalTester` for the given Trac URL and
Subversion URL"""
self.url = url
self.ticketcount = 0
# Connect, and login so we can run tests.
self.go_to_front()
self.login('admin')
def login(self, username):
"""Login as the given user"""
tc.add_auth("", self.url, username, username)
self.go_to_front()
tc.find("Login")
tc.follow("Login")
# We've provided authentication info earlier, so this should
# redirect back to the base url.
tc.find("logged in as %s" % username)
tc.find("Logout")
tc.url(self.url)
tc.notfind(internal_error)
def logout(self):
"""Logout"""
tc.follow("Logout")
tc.notfind(internal_error)
def create_ticket(self, summary=None, info=None):
"""Create a new (random) ticket in the test environment. Returns
the new ticket number.
:param summary:
may optionally be set to the desired summary
:param info:
may optionally be set to a dictionary of field value pairs for
populating the ticket. ``info['summary']`` overrides summary.
`summary` and `description` default to randomly-generated values.
"""
self.go_to_front()
tc.follow('New Ticket')
tc.notfind(internal_error)
if summary == None:
summary = random_sentence(4)
tc.formvalue('propertyform', 'field_summary', summary)
tc.formvalue('propertyform', 'field_description', random_page())
if info:
for field, value in info.items():
tc.formvalue('propertyform', 'field_%s' % field, value)
tc.submit('submit')
# we should be looking at the newly created ticket
tc.url(self.url + '/ticket/%s' % (self.ticketcount + 1))
# Increment self.ticketcount /after/ we've verified that the ticket
# was created so a failure does not trigger spurious later
# failures.
self.ticketcount += 1
# verify the ticket creation event shows up in the timeline
self.go_to_timeline()
tc.formvalue('prefs', 'ticket', True)
tc.submit()
tc.find('Ticket.*#%s.*created' % self.ticketcount)
return self.ticketcount
def quickjump(self, search):
"""Do a quick search to jump to a page."""
tc.formvalue('search', 'q', search)
tc.submit()
tc.notfind(internal_error)
def go_to_front(self):
"""Go to the Trac front page"""
tc.go(self.url)
tc.url(self.url)
tc.notfind(internal_error)
def go_to_ticket(self, ticketid):
"""Surf to the page for the given ticket ID. Assumes ticket
exists."""
ticket_url = self.url + "/ticket/%s" % ticketid
tc.go(ticket_url)
tc.url(ticket_url)
def go_to_wiki(self, name):
"""Surf to the page for the given wiki page."""
# Used to go based on a quickjump, but if the wiki pagename isn't
# camel case, that won't work.
wiki_url = self.url + '/wiki/%s' % name
tc.go(wiki_url)
tc.url(wiki_url)
def go_to_timeline(self):
"""Surf to the timeline page."""
self.go_to_front()
tc.follow('Timeline')
tc.url(self.url + '/timeline')
def go_to_query(self):
"""Surf to the custom query page."""
self.go_to_front()
tc.follow('View Tickets')
tc.follow('Custom Query')
tc.url(self.url + '/query')
def go_to_admin(self):
"""Surf to the webadmin page."""
self.go_to_front()
tc.follow('\\bAdmin\\b')
def go_to_roadmap(self):
"""Surf to the roadmap page."""
self.go_to_front()
tc.follow('\\bRoadmap\\b')
tc.url(self.url + '/roadmap')
def add_comment(self, ticketid, comment=None):
"""Adds a comment to the given ticket ID, assumes ticket exists."""
self.go_to_ticket(ticketid)
if comment is None:
comment = random_sentence()
tc.formvalue('propertyform', 'comment', comment)
tc.submit("submit")
# Verify we're where we're supposed to be.
# The fragment is stripped since Python 2.7.1, see:
# http://trac.edgewall.org/ticket/9990#comment:18
tc.url(self.url + '/ticket/%s(?:#comment:.*)?$' % ticketid)
return comment
def attach_file_to_ticket(self, ticketid, data=None, tempfilename=None,
description=None, replace=False,
content_type=None):
"""Attaches a file to the given ticket id, with random data if none is
provided. Assumes the ticket exists.
"""
if data is None:
data = random_page()
if description is None:
description = random_sentence()
if tempfilename is None:
tempfilename = random_word()
self.go_to_ticket(ticketid)
# set the value to what it already is, so that twill will know we
# want this form.
tc.formvalue('attachfile', 'action', 'new')
tc.submit()
tc.url(self.url + "/attachment/ticket/" \
"%s/\\?action=new&attachfilebutton=Attach\\+file" % ticketid)
fp = StringIO(data)
tc.formfile('attachment', 'attachment', tempfilename,
content_type=content_type, fp=fp)
tc.formvalue('attachment', 'description', description)
if replace:
tc.formvalue('attachment', 'replace', True)
tc.submit()
tc.url(self.url + '/attachment/ticket/%s/$' % ticketid)
return tempfilename
def clone_ticket(self, ticketid):
"""Create a clone of the given ticket id using the clone button."""
ticket_url = self.url + '/ticket/%s' % ticketid
tc.go(ticket_url)
tc.url(ticket_url)
tc.formvalue('clone', 'clone', 'Clone')
tc.submit()
# we should be looking at the newly created ticket
self.ticketcount += 1
tc.url(self.url + "/ticket/%s" % self.ticketcount)
return self.ticketcount
def create_wiki_page(self, page, content=None):
"""Creates the specified wiki page, with random content if none is
provided.
"""
if content == None:
content = random_page()
page_url = self.url + "/wiki/" + page
tc.go(page_url)
tc.url(page_url)
tc.find("The page %s does not exist." % page)
tc.formvalue('modifypage', 'action', 'edit')
tc.submit()
tc.url(page_url + '\\?action=edit')
tc.formvalue('edit', 'text', content)
tc.submit('save')
tc.url(page_url+'$')
# verify the event shows up in the timeline
self.go_to_timeline()
tc.formvalue('prefs', 'wiki', True)
tc.submit()
tc.find(page + ".*created")
def attach_file_to_wiki(self, name, data=None, tempfilename=None):
"""Attaches a file to the given wiki page, with random content if none
is provided. Assumes the wiki page exists.
"""
if data == None:
data = random_page()
if tempfilename is None:
tempfilename = random_word()
self.go_to_wiki(name)
# set the value to what it already is, so that twill will know we
# want this form.
tc.formvalue('attachfile', 'action', 'new')
tc.submit()
tc.url(self.url + "/attachment/wiki/" \
"%s/\\?action=new&attachfilebutton=Attach\\+file" % name)
fp = StringIO(data)
tc.formfile('attachment', 'attachment', tempfilename, fp=fp)
tc.formvalue('attachment', 'description', random_sentence())
tc.submit()
tc.url(self.url + '/attachment/wiki/%s/$' % name)
return tempfilename
def create_milestone(self, name=None, due=None):
"""Creates the specified milestone, with a random name if none is
provided. Returns the name of the milestone.
"""
if name == None:
name = random_unique_camel()
milestone_url = self.url + "/admin/ticket/milestones"
tc.go(milestone_url)
tc.url(milestone_url)
tc.formvalue('addmilestone', 'name', name)
if due:
# TODO: How should we deal with differences in date formats?
tc.formvalue('addmilestone', 'duedate', due)
tc.submit()
tc.notfind(internal_error)
tc.notfind('Milestone .* already exists')
tc.url(milestone_url)
tc.find(name)
# Make sure it's on the roadmap.
tc.follow('Roadmap')
tc.url(self.url + "/roadmap")
tc.find('Milestone:.*%s' % name)
tc.follow(name)
tc.url('%s/milestone/%s' % (self.url, unicode_quote(name)))
if not due:
tc.find('No date set')
return name
def create_component(self, name=None, user=None):
"""Creates the specified component, with a random camel-cased name if
none is provided. Returns the name."""
if name == None:
name = random_unique_camel()
component_url = self.url + "/admin/ticket/components"
tc.go(component_url)
tc.url(component_url)
tc.formvalue('addcomponent', 'name', name)
if user != None:
tc.formvalue('addcomponent', 'owner', user)
tc.submit()
# Verify the component appears in the component list
tc.url(component_url)
tc.find(name)
tc.notfind(internal_error)
# TODO: verify the component shows up in the newticket page
return name
def create_enum(self, kind, name=None):
"""Helper to create the specified enum (used for ``priority``,
``severity``, etc). If no name is given, a unique random word is used.
The name is returned.
"""
if name == None:
name = random_unique_camel()
priority_url = self.url + "/admin/ticket/" + kind
tc.go(priority_url)
tc.url(priority_url)
tc.formvalue('addenum', 'name', name)
tc.submit()
tc.url(priority_url)
tc.find(name)
tc.notfind(internal_error)
return name
def create_priority(self, name=None):
"""Create a new priority enum"""
return self.create_enum('priority', name)
def create_resolution(self, name=None):
"""Create a new resolution enum"""
return self.create_enum('resolution', name)
def create_severity(self, name=None):
"""Create a new severity enum"""
return self.create_enum('severity', name)
def create_type(self, name=None):
"""Create a new ticket type enum"""
return self.create_enum('type', name)
def create_version(self, name=None, releasetime=None):
"""Create a new version. The name defaults to a random camel-cased
word if not provided."""
version_admin = self.url + "/admin/ticket/versions"
if name == None:
name = random_unique_camel()
tc.go(version_admin)
tc.url(version_admin)
tc.formvalue('addversion', 'name', name)
if releasetime != None:
tc.formvalue('addversion', 'time', releasetime)
tc.submit()
tc.url(version_admin)
tc.find(name)
tc.notfind(internal_error)
# TODO: verify releasetime
def create_report(self, title, query, description):
"""Create a new report with the given title, query, and description"""
self.go_to_front()
tc.follow('View Tickets')
tc.formvalue('create_report', 'action', 'new') # select the right form
tc.submit()
tc.find('New Report')
tc.notfind(internal_error)
tc.formvalue('edit_report', 'title', title)
tc.formvalue('edit_report', 'description', description)
tc.formvalue('edit_report', 'query', query)
tc.submit()
reportnum = b.get_url().split('/')[-1]
# TODO: verify the url is correct
# TODO: verify the report number is correct
# TODO: verify the report does not cause an internal error
# TODO: verify the title appears on the report list
return reportnum
def ticket_set_milestone(self, ticketid, milestone):
"""Set the milestone on a given ticket."""
self.go_to_ticket(ticketid)
tc.formvalue('propertyform', 'milestone', milestone)
tc.submit('submit')
# TODO: verify the change occurred.
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/tests/functional/__init__.py | trac/trac/tests/functional/__init__.py | #!/usr/bin/python
"""functional_tests
While unittests work well for testing facets of an implementation, they fail to
provide assurances that the user-visible functions work in practice. Here, we
complement the unittests with functional tests that drive the system as a user
would to verify user visible functionality. These functional tests are run as
part of the unittests.
So, we use Twill to verify Trac's functionality as served by tracd (and in the
future, other frontends).
Unlike most unittests, we setup a single fixture against which we run all the
testcases. This is for two reasons: Primarily, that provides us with a more
complex set of data to test against and thus more room for triggering bugs.
Secondarily, the cost of setting up a new Trac environment and Subversion
repository is significant, so recreating the fixture for each test would be
very costly.
There are two primary objects involved in the testing, the
FunctionalTestEnvironment and the FunctionalTester.
FunctionalTestEnvironment represents the Trac environment, the Subversion
repository, and the server. The server will be run on a random local port in
the range 8000-8999. A subdirectory named 'tracenv' will be created containing
the Trac environment, Subversion repository, and the user authentication
information. An 'admin' user is created and given TRAC_ADMIN privs early in
the testing. There are other users added as well. All accounts are setup with
a password equalling the username. The test environment is left behind after
the testing has completed to assist in debugging.
FunctionalTester provides code reuse for the testcases to allow a higher-level
description of the more complicated bugs. For example, creating a new ticket
is the first step in regression testing many things, so FunctionalTester
provides a create_ticket() method. That method is written as if it were itself
a testcase for creating a ticket, so there is a testcase that simply calls that
method, and other testcases that use it as a higher-level step don't have to
worry about basic issues such as if the ticket was successfully created.
Requirements:
- Twill (http://twill.idyll.org/)
- lxml for XHTML validation (optional)
"""
import os
import signal
import sys
import time
import shutil
import stat
import unittest
import exceptions
import trac
from trac.tests.functional.compat import close_fds, rmtree
# Handle missing twill so we can print a useful 'SKIP'
# message. We import subprocess first to allow customizing it on Windows
# to select pywin32 in favor of _subprocess for low-level calls. If Twill
# is allowed to load first, its (unmodified) copy will always be loaded.
import subprocess
from better_twill import twill, b, tc, ConnectError
try:
# This is the first indicator of whether the subversion bindings are
# correctly installed.
from svn import core
has_svn = True
except ImportError:
has_svn = False
from datetime import datetime, timedelta
from trac.tests.contentgen import random_sentence, random_page, random_word, \
random_unique_camel
from trac.test import TestSetup, TestCaseSetup
internal_error = 'Trac detected an internal error:'
trac_source_tree = os.path.normpath(os.path.join(trac.__file__, '..', '..'))
# testing.log gets any unused output from subprocesses
logfile = open(os.path.join(trac_source_tree, 'testing.log'), 'w')
if twill:
# functional-testing.log gets the twill output
twill.set_output(open(os.path.join(trac_source_tree,
'functional-testing.log'), 'w'))
from trac.tests.functional.testenv import FunctionalTestEnvironment
from trac.tests.functional.svntestenv import SvnFunctionalTestEnvironment
from trac.tests.functional.tester import FunctionalTester
class FunctionalTestSuite(TestSetup):
"""TestSuite that provides a test fixture containing a
FunctionalTestEnvironment and a FunctionalTester.
"""
if has_svn:
env_class = SvnFunctionalTestEnvironment
else:
env_class = FunctionalTestEnvironment
tester_class = FunctionalTester
def setUp(self, port=None):
"""If no port is specified, use a semi-random port and subdirectory
'testenv'; but if a port is specified, use that port and
subdirectory 'testenv<portnum>'.
"""
if port is None:
port = 8000 + os.getpid() % 1000
dirname = "testenv"
else:
dirname = "testenv%s" % port
dirname = os.path.join(trac_source_tree, dirname)
baseurl = "http://127.0.0.1:%s" % port
self._testenv = self.env_class(dirname, port, baseurl)
self._testenv.start()
self._tester = self.tester_class(baseurl)
self.fixture = (self._testenv, self._tester)
def tearDown(self):
self._testenv.stop()
class FunctionalTestCaseSetup(TestCaseSetup):
"""Convenience class to expand the fixture into the _testenv and
_tester attributes."""
def setUp(self):
self._testenv, self._tester = self.fixture
class FunctionalTwillTestCaseSetup(FunctionalTestCaseSetup):
failureException = twill.errors.TwillAssertionError
else:
# We're going to have to skip the functional tests
class FunctionalTwillTestCaseSetup:
pass
class FunctionalTestCaseSetup:
pass
# Twill's find command accepts regexes; some convenient but complex regexes
# & regex factories are provided here (only one so far):
def regex_owned_by(username):
    """Return a regex matching the 'Owned by: <username>' ticket markup,
    tolerating intervening tags, newlines and spaces."""
    pattern = '(Owned by:(<[^>]*>|\\n| )*%s)'
    return pattern % (username,)
def suite():
    """Return the functional test suite, or an empty suite (with a
    printed SKIP diagnostic) when twill is not installed."""
    if twill:
        # Import lazily: testcases pulls in the full twill machinery.
        from trac.tests.functional.testcases import suite
        suite = suite()
    else:
        diagnostic = "SKIP: functional tests"
        if not twill:
            diagnostic += " (no twill installed)"
        print diagnostic
        # No tests to run, provide an empty suite.
        suite = unittest.TestSuite()
    return suite
if __name__ == '__main__':
unittest.main(defaultTest='suite')
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/tests/functional/testcases.py | trac/trac/tests/functional/testcases.py | # -*- encoding: utf-8 -*-
#!/usr/bin/python
import os
from trac.tests.functional import *
class RegressionTestRev6017(FunctionalTwillTestCaseSetup):
    def runTest(self):
        """Test for regression of the plugin reload fix in r6017"""
        # Setup the DeleteTicket plugin.  Read and write with explicit
        # close() so the handles are released immediately: a leaked
        # handle keeps the file locked on Windows, and the plugin file
        # must be flushed to disk before the environment reloads it.
        plugin_src = open(os.path.join(self._testenv.command_cwd,
                                       'sample-plugins', 'workflow',
                                       'DeleteTicket.py'))
        try:
            plugin = plugin_src.read()
        finally:
            plugin_src.close()
        plugin_dst = open(os.path.join(self._testenv.tracdir, 'plugins',
                                       'DeleteTicket.py'), 'w')
        try:
            plugin_dst.write(plugin)
        finally:
            plugin_dst.close()
        env = self._testenv.get_trac_environment()
        prevconfig = env.config.get('ticket', 'workflow')
        env.config.set('ticket', 'workflow',
                       prevconfig + ',DeleteTicketActionController')
        env.config.save()
        env = self._testenv.get_trac_environment() # reloads the environment
        loaded_components = env.compmgr.__metaclass__._components
        delete_plugins = [c for c in loaded_components
                          if 'DeleteTicketActionController' in c.__name__]
        try:
            self.assertEqual(len(delete_plugins), 1,
                             "Plugin loaded more than once.")
        finally:
            # Remove the DeleteTicket plugin
            env.config.set('ticket', 'workflow', prevconfig)
            env.config.save()
            self._testenv.restart()
            for ext in ('py', 'pyc', 'pyo'):
                filename = os.path.join(self._testenv.tracdir, 'plugins',
                                        'DeleteTicket.%s' % ext)
                if os.path.exists(filename):
                    os.unlink(filename)
class RegressionTestTicket3833a(FunctionalTestCaseSetup):
    def runTest(self):
        """Test for regression of http://trac.edgewall.org/ticket/3833 a"""
        # Assume the logging is already set to debug.
        traclogfile = open(os.path.join(self._testenv.tracdir, 'log',
                                        'trac.log'))
        try:
            # Seek to the end of file so we only look at new log output
            traclogfile.seek(0, 2)
            # Verify that logging is on initially
            env = self._testenv.get_trac_environment()
            env.log.debug("RegressionTestTicket3833 debug1")
            debug1 = traclogfile.read()
        finally:
            # Close the handle explicitly (leaked in the original version).
            traclogfile.close()
        self.assertNotEqual(debug1.find("RegressionTestTicket3833 debug1"), -1,
                            'Logging off when it should have been on.\n%r'
                            % debug1)
class RegressionTestTicket3833b(FunctionalTestCaseSetup):
    def runTest(self):
        """Test for regression of http://trac.edgewall.org/ticket/3833 b"""
        # Turn logging off, try to log something, and verify that it does
        # not show up.
        traclogfile = open(os.path.join(self._testenv.tracdir, 'log',
                                        'trac.log'))
        try:
            # Seek to the end of file so we only look at new log output
            traclogfile.seek(0, 2)
            env = self._testenv.get_trac_environment()
            env.config.set('logging', 'log_level', 'INFO')
            env.config.save()
            env = self._testenv.get_trac_environment()
            env.log.debug("RegressionTestTicket3833 debug2")
            env.log.info("RegressionTestTicket3833 info2")
            debug2 = traclogfile.read()
        finally:
            # Close the handle explicitly (leaked in the original version).
            traclogfile.close()
        # INFO messages must appear, DEBUG messages must not.
        self.assertNotEqual(debug2.find("RegressionTestTicket3833 info2"), -1,
                            'Logging at info failed.\n%r' % debug2)
        self.assertEqual(debug2.find("RegressionTestTicket3833 debug2"), -1,
                         'Logging still on when it should have been off.\n%r'
                         % debug2)
class RegressionTestTicket3833c(FunctionalTestCaseSetup):
    def runTest(self):
        """Test for regression of http://trac.edgewall.org/ticket/3833 c"""
        # Turn logging back on, try to log something, and verify that it
        # does show up.
        traclogfile = open(os.path.join(self._testenv.tracdir, 'log',
                                        'trac.log'))
        try:
            # Seek to the end of file so we only look at new log output
            traclogfile.seek(0, 2)
            env = self._testenv.get_trac_environment()
            env.config.set('logging', 'log_level', 'DEBUG')
            # Give the config watcher time to notice the modification time
            # change before saving.
            time.sleep(2)
            env.config.save()
            env = self._testenv.get_trac_environment()
            env.log.debug("RegressionTestTicket3833 debug3")
            env.log.info("RegressionTestTicket3833 info3")
            debug3 = traclogfile.read()
            message = ''
            success = debug3.find("RegressionTestTicket3833 debug3") != -1
            if not success:
                # Ok, the testcase failed, but we really need logging enabled.
                self._testenv.restart()
                env.log.debug("RegressionTestTicket3833 fixup3")
                fixup3 = traclogfile.read()
                message = 'Logging still off when it should have been on.\n' \
                          '%r\n%r' % (debug3, fixup3)
        finally:
            # Close the handle explicitly (leaked in the original version).
            traclogfile.close()
        self.assert_(success, message)
class RegressionTestTicket5572(FunctionalTwillTestCaseSetup):
    def runTest(self):
        """Test for regression of http://trac.edgewall.org/ticket/5572"""
        # Intentionally a no-op placeholder for now:
        # TODO: this ticket (implemented in r6011) adds a new feature to
        # make the progress bar more configurable. We need to test this
        # new configurability.
class RegressionTestTicket7209(FunctionalTwillTestCaseSetup):
    def runTest(self):
        """Test for regression of http://trac.edgewall.org/ticket/7209"""
        # Replacing an attachment with an empty description must preserve
        # the previous description; a non-empty one must replace it.
        summary = random_sentence(5)
        ticketid = self._tester.create_ticket(summary)
        self._tester.create_ticket()
        self._tester.add_comment(ticketid)
        self._tester.attach_file_to_ticket(ticketid, tempfilename='hello.txt',
                                           description='Preserved Descr')
        self._tester.go_to_ticket(ticketid)
        tc.find('Preserved Descr')
        # Now replace the existing attachment, and the description should come
        # through.
        self._tester.attach_file_to_ticket(ticketid, tempfilename='hello.txt',
                                           description='', replace=True)
        self._tester.go_to_ticket(ticketid)
        tc.find('Preserved Descr')
        self._tester.attach_file_to_ticket(ticketid, tempfilename='blah.txt',
                                           description='Second Attachment')
        self._tester.go_to_ticket(ticketid)
        tc.find('Second Attachment')
        # This one should get a new description when it's replaced
        # (Second->Other)
        self._tester.attach_file_to_ticket(ticketid, tempfilename='blah.txt',
                                           description='Other Attachment',
                                           replace=True)
        self._tester.go_to_ticket(ticketid)
        tc.find('Other Attachment')
        tc.notfind('Second Attachment')
class RegressionTestTicket9880(FunctionalTwillTestCaseSetup):
    def runTest(self):
        """Test for regression of http://trac.edgewall.org/ticket/9880

        Upload of a file which the browsers associates a Content-Type
        of multipart/related (e.g. an .mht file) should succeed.
        """
        summary = random_sentence(5)
        ticketid = self._tester.create_ticket(summary)
        self._tester.create_ticket()
        # The attachment must be accepted despite the multipart/* type;
        # the file contents themselves are irrelevant (see string below).
        self._tester.attach_file_to_ticket(ticketid, tempfilename='hello.mht',
                                           content_type='multipart/related',
                                           data="""
Well, the actual content of the file doesn't matter, the problem is
related to the "multipart/..." content_type associated to the file.
See also http://bugs.python.org/issue15564.
""")
class ErrorPageValidation(FunctionalTwillTestCaseSetup):
    def runTest(self):
        """Validate the error page"""
        # Request a bogus wiki-page version; the internal error page must
        # be shown at the same URL.
        url = self._tester.url + '/wiki/WikiStart'
        tc.go(url + '?version=bug')
        tc.url(url)
        tc.find(internal_error)
class RegressionTestTicket3663(FunctionalTwillTestCaseSetup):
    def runTest(self):
        """Regression test for non-UTF-8 PATH_INFO (#3663)

        Verify that URLs not encoded with UTF-8 are reported as invalid.
        """
        # invalid PATH_INFO: latin1-encoded accents are not valid UTF-8
        self._tester.go_to_wiki(u'été'.encode('latin1'))
        tc.code(404)
        tc.find('Invalid URL encoding')
        # invalid SCRIPT_NAME
        tc.go(u'été'.encode('latin1'))
        tc.code(404)
        tc.find('Invalid URL encoding')
def functionalSuite():
    """Create the base functional test suite for this module's tests."""
    return FunctionalTestSuite()
def suite():
    """Build the full functional suite: the local regression tests plus the
    functional suites contributed by each Trac subsystem."""
    suite = functionalSuite()
    suite.addTest(RegressionTestRev6017())
    suite.addTest(RegressionTestTicket3833a())
    suite.addTest(RegressionTestTicket3833b())
    suite.addTest(RegressionTestTicket3833c())
    suite.addTest(RegressionTestTicket5572())
    suite.addTest(RegressionTestTicket7209())
    suite.addTest(RegressionTestTicket9880())
    suite.addTest(ErrorPageValidation())
    suite.addTest(RegressionTestTicket3663())
    # Each subsystem appends its own tests to the shared suite; imports are
    # kept local (presumably to avoid import cycles at load time -- confirm).
    import trac.versioncontrol.tests
    trac.versioncontrol.tests.functionalSuite(suite)
    import trac.ticket.tests
    trac.ticket.tests.functionalSuite(suite)
    import trac.prefs.tests
    trac.prefs.tests.functionalSuite(suite)
    import trac.wiki.tests
    trac.wiki.tests.functionalSuite(suite)
    import trac.timeline.tests
    trac.timeline.tests.functionalSuite(suite)
    import trac.admin.tests
    trac.admin.tests.functionalSuite(suite)
    # The db tests should be last since the backup test occurs there.
    import trac.db.tests
    trac.db.tests.functionalSuite(suite)
    return suite
if __name__ == '__main__':
    # Run the aggregated functional suite when invoked directly.
    unittest.main(defaultTest='suite')
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/versioncontrol/svn_authz.py | trac/trac/versioncontrol/svn_authz.py | # -*- coding: utf-8 -*-
#
# Copyright (C) 2004-2009 Edgewall Software
# Copyright (C) 2004 Francois Harvey <fharvey@securiweb.net>
# Copyright (C) 2005 Matthew Good <trac@matt-good.net>
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://trac.edgewall.org/wiki/TracLicense.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://trac.edgewall.org/log/.
#
# Author: Francois Harvey <fharvey@securiweb.net>
# Matthew Good <trac@matt-good.net>
import os.path
from trac.config import Option, PathOption
from trac.core import *
from trac.perm import IPermissionPolicy
from trac.util import read_file
from trac.util.text import exception_to_unicode, to_unicode
from trac.util.translation import _
from trac.versioncontrol.api import RepositoryManager
def parent_iter(path):
    """Yield `path` and every ancestor directory up to '/'.

    Each ancestor is yielded twice: first with, then without its trailing
    slash, e.g. '/a/b/' -> '/a/b/', '/a/b', '/a/', '/a', '/'.  `path` is
    expected to start with '/'.
    """
    yield path
    while path != '/':
        path = path[:-1]                       # drop the trailing '/'
        yield path
        path = path[:path.rfind('/') + 1]      # cut back to the parent dir
        yield path
def join(*args):
    """Join path components with '/', stripping surrounding slashes from
    each component and dropping empty ones."""
    pieces = []
    for arg in args:
        trimmed = arg.strip('/')
        if trimmed:
            pieces.append(trimmed)
    return '/'.join(pieces)
class ParseError(Exception):
    """Exception thrown for parse errors in authz files"""
    # Raised by `parse` below with a translated, line-numbered message.
def parse(authz, modules):
    """Parse a Subversion authorization file.

    Return a dict of modules, each containing a dict of paths, each containing
    a dict mapping users to permissions. Only modules contained in `modules`
    are retained.

    :raises ParseError: for entries outside any section or not of the
                        form `name = value`.
    """
    groups = {}     # group name -> set of member subjects
    aliases = {}    # alias name -> subject
    sections = {}   # (module, path) -> list of (subject, perms) entries
    section = None
    lineno = 0
    for line in authz.splitlines():
        lineno += 1
        line = to_unicode(line.strip())
        # Skip blank lines and '#'/';' comments.
        if not line or line.startswith(('#', ';')):
            continue
        # Section headers: [groups], [aliases] or [module:/path] / [/path].
        if line.startswith('[') and line.endswith(']'):
            section = line[1:-1]
            continue
        if section is None:
            raise ParseError(_('Line %(lineno)d: Entry before first '
                               'section header', lineno=lineno))
        parts = line.split('=', 1)
        if len(parts) != 2:
            raise ParseError(_('Line %(lineno)d: Invalid entry',
                               lineno=lineno))
        name, value = parts
        name = name.strip()
        if section == 'groups':
            group = groups.setdefault(name, set())
            group.update(each.strip() for each in value.split(','))
        elif section == 'aliases':
            aliases[name] = value.strip()
        else:
            # Permission entry inside a path section; only keep sections
            # whose module prefix is one of `modules`.
            parts = section.split(':', 1)
            module, path = parts[0] if len(parts) > 1 else '', parts[-1]
            if module in modules:
                sections.setdefault((module, path), []).append((name, value))

    def resolve(subject, done):
        # Expand '@group' and '&alias' references recursively; `done`
        # guards against cycles in group definitions.
        if subject.startswith('@'):
            done.add(subject)
            for members in groups[subject[1:]] - done:
                for each in resolve(members, done):
                    yield each
        elif subject.startswith('&'):
            yield aliases[subject[1:]]
        else:
            yield subject

    authz = {}
    for (module, path), items in sections.iteritems():
        section = authz.setdefault(module, {}).setdefault(path, {})
        for subject, perms in items:
            for user in resolve(subject, set()):
                section.setdefault(user, 'r' in perms) # The first match wins
    return authz
class AuthzSourcePolicy(Component):
    """Permission policy for `source:` and `changeset:` resources using a
    Subversion authz file.

    `FILE_VIEW` and `BROWSER_VIEW` permissions are granted as specified in the
    authz file.

    `CHANGESET_VIEW` permission is granted for changesets where `FILE_VIEW` is
    granted on at least one modified file, as well as for empty changesets.
    """

    implements(IPermissionPolicy)

    authz_file = PathOption('trac', 'authz_file', '',
        """The path to the Subversion
        [http://svnbook.red-bean.com/en/1.5/svn.serverconfig.pathbasedauthz.html authorization (authz) file].
        To enable authz permission checking, the `AuthzSourcePolicy` permission
        policy must be added to `[trac] permission_policies`.
        """)

    authz_module_name = Option('trac', 'authz_module_name', '',
        """The module prefix used in the `authz_file` for the default
        repository. If left empty, the global section is used.
        """)

    # Cached parse state (see _get_authz_info): authz-file mtime, the parsed
    # structure from `parse`, and the set of users granted anything at all.
    _mtime = 0
    _authz = {}
    _users = set()

    # The (realm, action) pairs this policy decides on; any other permission
    # check falls through (check_permission returns None implicitly).
    _handled_perms = frozenset([(None, 'BROWSER_VIEW'),
                                (None, 'CHANGESET_VIEW'),
                                (None, 'FILE_VIEW'),
                                (None, 'LOG_VIEW'),
                                ('source', 'BROWSER_VIEW'),
                                ('source', 'FILE_VIEW'),
                                ('source', 'LOG_VIEW'),
                                ('changeset', 'CHANGESET_VIEW')])

    # IPermissionPolicy methods

    def check_permission(self, action, username, resource, perm):
        """Grant or deny `action` on `resource` according to the authz file.

        Returns True/False for a decision, None to defer to other policies.
        """
        realm = resource.realm if resource else None
        if (realm, action) in self._handled_perms:
            authz, users = self._get_authz_info()
            if authz is None:
                # Missing or unparsable authz file: deny outright.
                return False

            # Subjects that may match this user in the authz file.
            if username == 'anonymous':
                usernames = ('$anonymous', '*')
            else:
                usernames = (username, '$authenticated', '*')
            if resource is None:
                # Global check: grant only if the user appears anywhere.
                return True if users & set(usernames) else None

            rm = RepositoryManager(self.env)
            try:
                repos = rm.get_repository(resource.parent.id)
            except TracError:
                return True # Allow error to be displayed in the repo index
            if repos is None:
                return True
            modules = [resource.parent.id or self.authz_module_name]
            if modules[0]:
                # Also consult the global (unprefixed) sections.
                modules.append('')

            def check_path(path):
                # Normalize to a '/'-rooted, '/'-terminated path within
                # the repository scope.
                path = '/' + join(repos.scope, path)
                if path != '/':
                    path += '/'

                # Allow access to parent directories of allowed resources
                if any(section.get(user) is True
                       for module in modules
                       for spath, section in authz.get(module, {}).iteritems()
                       if spath.startswith(path)
                       for user in usernames):
                    return True

                # Walk from resource up parent directories
                for spath in parent_iter(path):
                    for module in modules:
                        section = authz.get(module, {}).get(spath)
                        if section:
                            for user in usernames:
                                result = section.get(user)
                                if result is not None:
                                    return result

            if realm == 'source':
                return check_path(resource.id)
            elif realm == 'changeset':
                # Grant for empty changesets, or when at least one changed
                # path is viewable.
                changes = list(repos.get_changeset(resource.id).get_changes())
                if not changes or any(check_path(change[0])
                                      for change in changes):
                    return True

    def _get_authz_info(self):
        """Return the `(authz, users)` cache, re-parsing the authz file when
        its modification time has changed; `(None, set())` on errors."""
        try:
            mtime = os.path.getmtime(self.authz_file)
        except OSError, e:
            if self._authz is not None:
                self.log.error('Error accessing authz file: %s',
                               exception_to_unicode(e))
            self._mtime = mtime = 0
            self._authz = None
            self._users = set()
        if mtime > self._mtime:
            self._mtime = mtime
            rm = RepositoryManager(self.env)
            modules = set(repos.reponame
                          for repos in rm.get_real_repositories())
            if '' in modules and self.authz_module_name:
                modules.add(self.authz_module_name)
            modules.add('')
            self.log.info('Parsing authz file: %s' % self.authz_file)
            try:
                self._authz = parse(read_file(self.authz_file), modules)
                # Users granted a True permission anywhere in the file.
                self._users = set(user for paths in self._authz.itervalues()
                                  for path in paths.itervalues()
                                  for user, result in path.iteritems()
                                  if result)
            except Exception, e:
                self._authz = None
                self._users = set()
                self.log.error('Error parsing authz file: %s',
                               exception_to_unicode(e))
        return self._authz, self._users
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/versioncontrol/svn_prop.py | trac/trac/versioncontrol/svn_prop.py | # -*- coding: utf-8 -*-
#
# Copyright (C) 2012 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://trac.edgewall.org/wiki/TracLicense.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://trac.edgewall.org/log/.
from trac.util import import_namespace
# Re-export the contents of the relocated module into this namespace
# (presumably everything public -- see trac.util.import_namespace).
import_namespace(globals(), 'tracopt.versioncontrol.svn.svn_prop')

# This module is a stub provided for backward compatibility. The svn_prop
# module has been moved to tracopt.versioncontrol.svn. Please update your
# code to use the new location.
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/versioncontrol/api.py | trac/trac/versioncontrol/api.py | # -*- coding: utf-8 -*-
#
# Copyright (C)2005-2011 Edgewall Software
# Copyright (C) 2005 Christopher Lenz <cmlenz@gmx.de>
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://trac.edgewall.org/wiki/TracLicense.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://trac.edgewall.org/log/.
#
# Author: Christopher Lenz <cmlenz@gmx.de>
from __future__ import with_statement
import os.path
import time
from trac.admin import AdminCommandError, IAdminCommandProvider, get_dir_list
from trac.config import ConfigSection, ListOption, Option
from trac.core import *
from trac.resource import IResourceManager, Resource, ResourceNotFound
from trac.util.concurrency import threading
from trac.util.text import printout, to_unicode
from trac.util.translation import _
from trac.web.api import IRequestFilter
def is_default(reponame):
    """Check whether `reponame` is the default repository."""
    # An empty/None name always means the default repository.
    if not reponame:
        return True
    # Otherwise match the '(default)' placeholder, translated or not.
    return reponame in ('(default)', _('(default)'))
class IRepositoryConnector(Interface):
    """Provide support for a specific version control system."""

    # Place holder for storing a relevant error message when the connector
    # exists but can't be used (see get_supported_types below).
    error = None

    def get_supported_types():
        """Return the types of version control systems that are supported.

        Yields `(repotype, priority)` pairs, where `repotype` is used to
        match against the configured `[trac] repository_type` value in TracIni.

        If multiple provider match a given type, the `priority` is used to
        choose between them (highest number is highest priority).

        If the `priority` returned is negative, this indicates that the
        connector for the given `repotype` indeed exists but can't be
        used for some reason. The `error` property can then be used to
        store an error message or exception relevant to the problem detected.
        """

    def get_repository(repos_type, repos_dir, params):
        """Return a Repository instance for the given repository type and dir.
        """
class IRepositoryProvider(Interface):
    """Provide known named instances of Repository.

    Implementations are enumerated through the `providers` extension point
    of `RepositoryManager`.
    """

    def get_repositories():
        """Generate repository information for known repositories.

        Repository information is a key,value pair, where the value is
        a dictionary which must contain at the very least either of
        the following entries:

         - `'dir'`: the repository directory which can be used by the
                    connector to create a `Repository` instance. This
                    defines a "real" repository.

         - `'alias'`: the name of another repository. This defines an
                      alias to another (real) repository.

        Optional entries:

         - `'type'`: the type of the repository (if not given, the
                     default repository type will be used).

         - `'description'`: a description of the repository (can
                            contain WikiFormatting).

         - `'hidden'`: if set to `'true'`, the repository is hidden
                       from the repository index.

         - `'url'`: the base URL for checking out the repository.
        """
class IRepositoryChangeListener(Interface):
    """Listen for changes in repositories.

    Implementations are notified through the `change_listeners` extension
    point of `RepositoryManager`.
    """

    def changeset_added(repos, changeset):
        """Called after a changeset has been added to a repository."""

    def changeset_modified(repos, changeset, old_changeset):
        """Called after a changeset has been modified in a repository.

        The `old_changeset` argument contains the metadata of the changeset
        prior to the modification. It is `None` if the old metadata cannot
        be retrieved.
        """
class DbRepositoryProvider(Component):
    """Component providing repositories registered in the DB."""

    implements(IRepositoryProvider, IAdminCommandProvider)

    # Attribute names that may be stored per-repository in the `repository`
    # table (one (id, name, value) row per attribute).
    repository_attrs = ('alias', 'description', 'dir', 'hidden', 'name',
                        'type', 'url')

    # IRepositoryProvider methods

    def get_repositories(self):
        """Retrieve repositories specified in the repository DB table."""
        repos = {}
        # The IN (...) list is built from the class-level constant above,
        # not from user input, so the string interpolation is safe.
        for id, name, value in self.env.db_query(
                "SELECT id, name, value FROM repository WHERE name IN (%s)"
                % ",".join("'%s'" % each for each in self.repository_attrs)):
            if value is not None:
                repos.setdefault(id, {})[name] = value
        reponames = {}
        for id, info in repos.iteritems():
            # Only expose fully-defined entries: a name plus either a real
            # directory or an alias.
            if 'name' in info and ('dir' in info or 'alias' in info):
                info['id'] = id
                reponames[info['name']] = info
        return reponames.iteritems()

    # IAdminCommandProvider methods

    def get_admin_commands(self):
        """Yield the trac-admin `repository *` subcommands."""
        yield ('repository add', '<repos> <dir> [type]',
               "Add a source repository",
               self._complete_add, self._do_add)
        yield ('repository alias', '<name> <target>',
               "Create an alias for a repository",
               self._complete_alias, self._do_alias)
        yield ('repository remove', '<repos>',
               "Remove a source repository",
               self._complete_repos, self._do_remove)
        yield ('repository set', '<repos> <key> <value>',
               """Set an attribute of a repository

               The following keys are supported: %s
               """ % ', '.join(self.repository_attrs),
               self._complete_set, self._do_set)

    def get_reponames(self):
        """Return all known repository names, '(default)' for the unnamed
        one (used for shell completion)."""
        rm = RepositoryManager(self.env)
        return [reponame or '(default)' for reponame
                in rm.get_all_repositories()]

    # Shell-completion helpers for the admin commands above.

    def _complete_add(self, args):
        if len(args) == 2:
            return get_dir_list(args[-1], True)
        elif len(args) == 3:
            return RepositoryManager(self.env).get_supported_types()

    def _complete_alias(self, args):
        if len(args) == 2:
            return self.get_reponames()

    def _complete_repos(self, args):
        if len(args) == 1:
            return self.get_reponames()

    def _complete_set(self, args):
        if len(args) == 1:
            return self.get_reponames()
        elif len(args) == 2:
            return self.repository_attrs

    # Admin-command bodies, thin wrappers over the public interface below.

    def _do_add(self, reponame, dir, type_=None):
        self.add_repository(reponame, os.path.abspath(dir), type_)

    def _do_alias(self, reponame, target):
        self.add_alias(reponame, target)

    def _do_remove(self, reponame):
        self.remove_repository(reponame)

    def _do_set(self, reponame, key, value):
        if key not in self.repository_attrs:
            raise AdminCommandError(_('Invalid key "%(key)s"', key=key))
        if key == 'dir':
            value = os.path.abspath(value)
        self.modify_repository(reponame, {key: value})
        if not reponame:
            reponame = '(default)'
        # Changing the directory (and possibly the type) invalidates the
        # cached sync state; remind the admin to resync.
        if key == 'dir':
            printout(_('You should now run "repository resync %(name)s".',
                       name=reponame))
        elif key == 'type':
            printout(_('You may have to run "repository resync %(name)s".',
                       name=reponame))

    # Public interface

    def add_repository(self, reponame, dir, type_=None):
        """Add a repository.

        :raises TracError: if `dir` is not absolute or `type_` is not a
                           supported repository type.
        """
        if not os.path.isabs(dir):
            raise TracError(_("The repository directory must be absolute"))
        if is_default(reponame):
            reponame = ''
        rm = RepositoryManager(self.env)
        if type_ and type_ not in rm.get_supported_types():
            raise TracError(_("The repository type '%(type)s' is not "
                              "supported", type=type_))
        with self.env.db_transaction as db:
            id = rm.get_repository_id(reponame)
            db.executemany(
                "INSERT INTO repository (id, name, value) VALUES (%s, %s, %s)",
                [(id, 'dir', dir),
                 (id, 'type', type_ or '')])
        rm.reload_repositories()

    def add_alias(self, reponame, target):
        """Create an alias repository."""
        if is_default(reponame):
            reponame = ''
        if is_default(target):
            target = ''
        rm = RepositoryManager(self.env)
        with self.env.db_transaction as db:
            id = rm.get_repository_id(reponame)
            # An alias row has no 'dir' of its own.
            db.executemany(
                "INSERT INTO repository (id, name, value) VALUES (%s, %s, %s)",
                [(id, 'dir', None),
                 (id, 'alias', target)])
        rm.reload_repositories()

    def remove_repository(self, reponame):
        """Remove a repository and its cached revision data."""
        if is_default(reponame):
            reponame = ''
        rm = RepositoryManager(self.env)
        with self.env.db_transaction as db:
            id = rm.get_repository_id(reponame)
            db("DELETE FROM repository WHERE id=%s", (id,))
            db("DELETE FROM revision WHERE repos=%s", (id,))
            db("DELETE FROM node_change WHERE repos=%s", (id,))
        rm.reload_repositories()

    def modify_repository(self, reponame, changes):
        """Modify attributes of a repository.

        :param changes: dict of attribute name -> new value; keys not in
                        `repository_attrs` are silently ignored.
        :raises TracError: if a 'dir' value is not an absolute path.
        """
        if is_default(reponame):
            reponame = ''
        rm = RepositoryManager(self.env)
        with self.env.db_transaction as db:
            id = rm.get_repository_id(reponame)
            for (k, v) in changes.iteritems():
                if k not in self.repository_attrs:
                    continue
                if k in ('alias', 'name') and is_default(v):
                    v = ''
                if k == 'dir' and not os.path.isabs(v):
                    raise TracError(_("The repository directory must be "
                                      "absolute"))
                # Update the attribute row, inserting it if absent.
                db("UPDATE repository SET value=%s WHERE id=%s AND name=%s",
                   (v, id, k))
                if not db(
                        "SELECT value FROM repository WHERE id=%s AND name=%s",
                        (id, k)):
                    db("""INSERT INTO repository (id, name, value)
                          VALUES (%s, %s, %s)
                          """, (id, k, v))
        rm.reload_repositories()
class RepositoryManager(Component):
"""Version control system manager."""
implements(IRequestFilter, IResourceManager, IRepositoryProvider)
connectors = ExtensionPoint(IRepositoryConnector)
providers = ExtensionPoint(IRepositoryProvider)
change_listeners = ExtensionPoint(IRepositoryChangeListener)
repositories_section = ConfigSection('repositories',
"""One of the alternatives for registering new repositories is to
populate the `[repositories]` section of the `trac.ini`.
This is especially suited for setting up convenience aliases,
short-lived repositories, or during the initial phases of an
installation.
See [TracRepositoryAdmin#Intrac.ini TracRepositoryAdmin] for details
about the format adopted for this section and the rest of that page for
the other alternatives.
(''since 0.12'')""")
repository_type = Option('trac', 'repository_type', 'svn',
"""Default repository connector type. (''since 0.10'')
This is also used as the default repository type for repositories
defined in [[TracIni#repositories-section repositories]] or using the
"Repositories" admin panel. (''since 0.12'')
""")
repository_dir = Option('trac', 'repository_dir', '',
"""Path to the default repository. This can also be a relative path
(''since 0.11'').
This option is deprecated, and repositories should be defined in the
[TracIni#repositories-section repositories] section, or using the
"Repositories" admin panel. (''since 0.12'')""")
repository_sync_per_request = ListOption('trac',
'repository_sync_per_request', '(default)',
doc="""List of repositories that should be synchronized on every page
request.
Leave this option empty if you have set up post-commit hooks calling
`trac-admin $ENV changeset added` on all your repositories
(recommended). Otherwise, set it to a comma-separated list of
repository names. Note that this will negatively affect performance,
and will prevent changeset listeners from receiving events from the
repositories specified here. The default is to synchronize the default
repository, for backward compatibility. (''since 0.12'')""")
    def __init__(self):
        self._cache = {}               # thread id -> {reponame: Repository}
        self._lock = threading.Lock()  # guards _cache
        # Presumably filled lazily by methods outside this view -- confirm.
        self._connectors = None
        self._all_repositories = None
# IRequestFilter methods
    def pre_process_request(self, req, handler):
        """IRequestFilter: synchronize the repositories listed in
        `[trac] repository_sync_per_request` before handling the request.

        Chrome requests (static content) are skipped.
        """
        from trac.web.chrome import Chrome, add_warning
        if handler is not Chrome(self.env):
            for reponame in self.repository_sync_per_request:
                start = time.time()
                if is_default(reponame):
                    reponame = ''
                try:
                    repo = self.get_repository(reponame)
                    if repo:
                        repo.sync()
                    else:
                        self.log.warning("Unable to find repository '%s' for "
                                         "synchronization",
                                         reponame or '(default)')
                        continue
                except TracError, e:
                    add_warning(req,
                        _("Can't synchronize with repository \"%(name)s\" "
                          "(%(error)s). Look in the Trac log for more "
                          "information.", name=reponame or '(default)',
                          error=to_unicode(e.message)))
                # NOTE(review): this timing message is also logged when the
                # sync above failed with a TracError -- confirm intended.
                self.log.info("Synchronized '%s' repository in %0.2f seconds",
                              reponame or '(default)', time.time() - start)
        return handler
def post_process_request(self, req, template, data, content_type):
return (template, data, content_type)
# IResourceManager methods
def get_resource_realms(self):
yield 'changeset'
yield 'source'
yield 'repository'
    def get_resource_description(self, resource, format=None, **kwargs):
        """IResourceManager: human-readable description for changeset,
        source and repository resources."""
        if resource.realm == 'changeset':
            parent = resource.parent
            reponame = parent and parent.id
            id = resource.id
            if reponame:
                return _("Changeset %(rev)s in %(repo)s", rev=id, repo=reponame)
            else:
                return _("Changeset %(rev)s", rev=id)
        elif resource.realm == 'source':
            parent = resource.parent
            reponame = parent and parent.id
            id = resource.id
            version = ''
            if format == 'summary':
                repos = self.get_repository(reponame)
                node = repos.get_node(resource.id, resource.version)
                if node.isdir:
                    kind = _("directory")
                elif node.isfile:
                    kind = _("file")
                # NOTE(review): `kind` is unbound if the node is neither a
                # directory nor a file -- presumably that never happens for
                # an existing node; confirm.
                if resource.version:
                    version = _(" at version %(rev)s", rev=resource.version)
            else:
                kind = _("path")
                if resource.version:
                    version = '@%s' % resource.version
            in_repo = _(" in %(repo)s", repo=reponame) if reponame else ''
            # TRANSLATOR: file /path/to/file.py at version 13 in reponame
            return _('%(kind)s %(id)s%(at_version)s%(in_repo)s',
                     kind=kind, id=id, at_version=version, in_repo=in_repo)
        elif resource.realm == 'repository':
            return _("Repository %(repo)s", repo=resource.id)
    def get_resource_url(self, resource, href, **kwargs):
        """IResourceManager: build the URL for changeset, source and
        repository resources."""
        if resource.realm == 'changeset':
            parent = resource.parent
            return href.changeset(resource.id, parent and parent.id or None)
        elif resource.realm == 'source':
            parent = resource.parent
            return href.browser(parent and parent.id or None, resource.id,
                                rev=resource.version or None)
        elif resource.realm == 'repository':
            return href.browser(resource.id or None)
    def resource_exists(self, resource):
        """IResourceManager: check whether the changeset, node or repository
        denoted by `resource` actually exists."""
        if resource.realm == 'repository':
            reponame = resource.id
        else:
            reponame = resource.parent.id
        repos = self.env.get_repository(reponame)
        if not repos:
            return False
        if resource.realm == 'changeset':
            try:
                repos.get_changeset(resource.id)
                return True
            except NoSuchChangeset:
                return False
        elif resource.realm == 'source':
            try:
                repos.get_node(resource.id, resource.version)
                return True
            except NoSuchNode:
                return False
        elif resource.realm == 'repository':
            # The repository itself was resolved above, so it exists.
            return True
# IRepositoryProvider methods
    def get_repositories(self):
        """Retrieve repositories specified in TracIni.

        The `[repositories]` section can be used to specify a list
        of repositories.
        """
        repositories = self.repositories_section
        reponames = {}
        # eventually add pre-0.12 default repository
        if self.repository_dir:
            reponames[''] = {'dir': self.repository_dir}
        # first pass to gather the <name>.dir entries
        for option in repositories:
            if option.endswith('.dir'):
                reponames[option[:-4]] = {}
        # second pass to gather aliases
        for option in repositories:
            alias = repositories.get(option)
            if '.' not in option:   # Support <alias> = <repo> syntax
                option += '.alias'
            if option.endswith('.alias') and alias in reponames:
                reponames.setdefault(option[:-6], {})['alias'] = alias
        # third pass to gather the <name>.<detail> entries
        for option in repositories:
            if '.' in option:
                name, detail = option.rsplit('.', 1)
                if name in reponames and detail != 'alias':
                    reponames[name][detail] = repositories.get(option)

        for reponame, info in reponames.iteritems():
            yield (reponame, info)
# Public API methods
def get_supported_types(self):
"""Return the list of supported repository types."""
types = set(type_ for connector in self.connectors
for (type_, prio) in connector.get_supported_types() or []
if prio >= 0)
return list(types)
def get_repositories_by_dir(self, directory):
"""Retrieve the repositories based on the given directory.
:param directory: the key for identifying the repositories.
:return: list of `Repository` instances.
"""
directory = os.path.join(os.path.normcase(directory), '')
repositories = []
for reponame, repoinfo in self.get_all_repositories().iteritems():
dir = repoinfo.get('dir')
if dir:
dir = os.path.join(os.path.normcase(dir), '')
if dir.startswith(directory):
repos = self.get_repository(reponame)
if repos:
repositories.append(repos)
return repositories
    def get_repository_id(self, reponame):
        """Return a unique id for the given repository name.

        This will create and save a new id if none is found.

        \note: this should probably be renamed as we're dealing
        exclusively with *db* repository ids here.
        """
        with self.env.db_transaction as db:
            # The repository table holds (id, attribute-name, value) rows;
            # look up the row carrying the 'name' attribute.
            for id, in db(
                    "SELECT id FROM repository WHERE name='name' AND value=%s",
                    (reponame,)):
                return id

            # Not found: allocate the next free id and record the name.
            id = db("SELECT COALESCE(MAX(id), 0) FROM repository")[0][0] + 1
            db("INSERT INTO repository (id, name, value) VALUES (%s, %s, %s)",
               (id, 'name', reponame))
            return id
    def get_repository(self, reponame):
        """Retrieve the appropriate `Repository` for the given
        repository name.
        :param reponame: the key for specifying the repository.
                         If no name is given, take the default
                         repository.
        :return: if no corresponding repository was defined,
                 simply return `None`.
        """
        reponame = reponame or ''
        repoinfo = self.get_all_repositories().get(reponame, {})
        if 'alias' in repoinfo:
            # Resolve one level of aliasing to the real repository.
            reponame = repoinfo['alias']
            repoinfo = self.get_all_repositories().get(reponame, {})
        rdir = repoinfo.get('dir')
        if not rdir:
            return None
        rtype = repoinfo.get('type') or self.repository_type
        # get a Repository for the reponame (use a thread-level cache)
        with self.env.db_transaction: # prevent possible deadlock, see #4465
            with self._lock:
                tid = threading._get_ident()
                if tid in self._cache:
                    repositories = self._cache[tid]
                else:
                    repositories = self._cache[tid] = {}
                repos = repositories.get(reponame)
                if not repos:
                    if not os.path.isabs(rdir):
                        # Relative repository directories are resolved
                        # against the environment directory.
                        rdir = os.path.join(self.env.path, rdir)
                    connector = self._get_connector(rtype)
                    repos = connector.get_repository(rtype, rdir,
                                                     repoinfo.copy())
                    repositories[reponame] = repos
                return repos
def get_repository_by_path(self, path):
"""Retrieve a matching `Repository` for the given `path`.
:param path: the eventually scoped repository-scoped path
:return: a `(reponame, repos, path)` triple, where `path` is
the remaining part of `path` once the `reponame` has
been truncated, if needed.
"""
matches = []
path = path.strip('/') + '/' if path else '/'
for reponame in self.get_all_repositories().keys():
stripped_reponame = reponame.strip('/') + '/'
if path.startswith(stripped_reponame):
matches.append((len(stripped_reponame), reponame))
if matches:
matches.sort()
length, reponame = matches[-1]
path = path[length:]
else:
reponame = ''
return (reponame, self.get_repository(reponame),
path.rstrip('/') or '/')
def get_default_repository(self, context):
"""Recover the appropriate repository from the current context.
Lookup the closest source or changeset resource in the context
hierarchy and return the name of its associated repository.
"""
while context:
if context.resource.realm in ('source', 'changeset'):
return context.resource.parent.id
context = context.parent
    def get_all_repositories(self):
        """Return a dictionary of repository information, indexed by name."""
        if not self._all_repositories:
            # Lazily build the cache by merging the data from all
            # providers; the first provider to declare a name wins.
            all_repositories = {}
            for provider in self.providers:
                for reponame, info in provider.get_repositories() or []:
                    if reponame in all_repositories:
                        self.log.warn("Discarding duplicate repository '%s'",
                                      reponame)
                    else:
                        info['name'] = reponame
                        if 'id' not in info:
                            # Make sure every repository has a surrogate key.
                            info['id'] = self.get_repository_id(reponame)
                        all_repositories[reponame] = info
            self._all_repositories = all_repositories
        return self._all_repositories
def get_real_repositories(self):
"""Return a set of all real repositories (i.e. excluding aliases)."""
repositories = set()
for reponame in self.get_all_repositories():
try:
repos = self.get_repository(reponame)
if repos is not None:
repositories.add(repos)
except TracError:
pass # Skip invalid repositories
return repositories
    def reload_repositories(self):
        """Reload the repositories from the providers."""
        with self._lock:
            # Drop the per-thread `Repository` instances and the merged
            # repository info so they get rebuilt on next access.
            # FIXME: trac-admin doesn't reload the environment
            self._cache = {}
            self._all_repositories = None
        self.config.touch() # Force environment reload
    def notify(self, event, reponame, revs):
        """Notify repositories and change listeners about repository events.
        The supported events are the names of the methods defined in the
        `IRepositoryChangeListener` interface.
        """
        self.log.debug("Event %s on %s for changesets %r",
                       event, reponame, revs)
        # Notify a repository by name, and all repositories with the same
        # base, or all repositories by base or by repository dir
        repos = self.get_repository(reponame)
        repositories = []
        if repos:
            base = repos.get_base()
        else:
            # Unknown name: the caller may have passed a directory instead.
            dir = os.path.abspath(reponame)
            repositories = self.get_repositories_by_dir(dir)
            if repositories:
                base = None
            else:
                base = reponame
        if base:
            repositories = [r for r in self.get_real_repositories()
                            if r.get_base() == base]
        if not repositories:
            self.log.warn("Found no repositories matching '%s' base.",
                          base or reponame)
            return
        for repos in sorted(repositories, key=lambda r: r.reponame):
            repos.sync()
            for rev in revs:
                args = []
                if event == 'changeset_modified':
                    # Listeners get the pre-resync ("old") metadata as an
                    # extra argument (see `Repository.sync_changeset`).
                    args.append(repos.sync_changeset(rev))
                try:
                    changeset = repos.get_changeset(rev)
                except NoSuchChangeset:
                    try:
                        # The changeset may not have been synced yet.
                        repos.sync_changeset(rev)
                        changeset = repos.get_changeset(rev)
                    except NoSuchChangeset:
                        continue
                self.log.debug("Event %s on %s for revision %s",
                               event, repos.reponame or '(default)', rev)
                for listener in self.change_listeners:
                    getattr(listener, event)(repos, changeset, *args)
    def shutdown(self, tid=None):
        """Free `Repository` instances bound to a given thread identifier"""
        if tid:
            # A thread may only release its own cached instances.
            assert tid == threading._get_ident()
            with self._lock:
                repositories = self._cache.pop(tid, {})
                for reponame, repos in repositories.iteritems():
                    repos.close()
# private methods
    def _get_connector(self, rtype):
        """Retrieve the appropriate connector for the given repository type.
        Note that the self._lock must be held when calling this method.
        :raises TracError: if no enabled connector supports `rtype`, or if
                           the best connector reported an error (negative
                           priority).
        """
        if self._connectors is None:
            # build an environment-level cache for the preferred connectors
            # A strictly higher priority wins; ties keep the first
            # connector seen.
            self._connectors = {}
            for connector in self.connectors:
                for type_, prio in connector.get_supported_types() or []:
                    keep = (connector, prio)
                    if type_ in self._connectors and \
                            prio <= self._connectors[type_][1]:
                        keep = None
                    if keep:
                        self._connectors[type_] = keep
        if rtype in self._connectors:
            connector, prio = self._connectors[rtype]
            if prio >= 0: # no error condition
                return connector
            else:
                raise TracError(
                    _('Unsupported version control system "%(name)s"'
                      ': %(error)s', name=rtype,
                      error=to_unicode(connector.error)))
        else:
            raise TracError(
                _('Unsupported version control system "%(name)s": '
                  'Can\'t find an appropriate component, maybe the '
                  'corresponding plugin was not enabled? ', name=rtype))
class NoSuchChangeset(ResourceNotFound):
    """Raised when a repository has no changeset for the given revision."""
    def __init__(self, rev):
        ResourceNotFound.__init__(self,
                                  _('No changeset %(rev)s in the repository',
                                    rev=rev),
                                  _('No such changeset'))
class NoSuchNode(ResourceNotFound):
    """Raised when a repository has no node at the given path and revision."""
    def __init__(self, path, rev, msg=None):
        # `msg` is an optional prefix giving more context about the failure.
        if msg is None:
            msg = _("No node %(path)s at revision %(rev)s", path=path, rev=rev)
        else:
            msg = _("%(msg)s: No node %(path)s at revision %(rev)s",
                    msg=msg, path=path, rev=rev)
        ResourceNotFound.__init__(self, msg, _('No such node'))
class Repository(object):
"""Base class for a repository provided by a version control system."""
has_linear_changesets = False
scope = '/'
    def __init__(self, name, params, log):
        """Initialize a repository.
        :param name: a unique name identifying the repository, usually a
                     type-specific prefix followed by the path to the
                     repository.
        :param params: a `dict` of parameters for the repository. Contains
                       the name of the repository under the key "name" and
                       the surrogate key that identifies the repository in
                       the database under the key "id".
        :param log: a logger instance.
        """
        self.name = name
        self.params = params
        # `reponame` is the user-visible name; `id` the db surrogate key.
        self.reponame = params['name']
        self.id = params['id']
        self.log = log
        self.resource = Resource('repository', self.reponame)
    def close(self):
        """Close the connection to the repository.
        Concrete backends must override this method.
        """
        raise NotImplementedError
    def get_base(self):
        """Return the name of the base repository for this repository.
        This function returns the name of the base repository to which scoped
        repositories belong. For non-scoped repositories, it returns the
        repository name.
        """
        # Default implementation: a repository is its own base.
        return self.name
    def clear(self, youngest_rev=None):
        """Clear any data that may have been cached in instance properties.
        `youngest_rev` can be specified as a way to force the value
        of the `youngest_rev` property (''will change in 0.12'').
        """
        # No-op by default; backends override as needed.
        pass
    def sync(self, rev_callback=None, clean=False):
        """Perform a sync of the repository cache, if relevant.
        If given, `rev_callback` must be a callable taking a `rev` parameter.
        The backend will call this function for each `rev` it decided to
        synchronize, once the synchronization changes are committed to the
        cache. When `clean` is `True`, the cache is cleaned first.
        """
        # No-op by default; caching backends override this.
        pass
    def sync_changeset(self, rev):
        """Resync the repository cache for the given `rev`, if relevant.
        Returns a "metadata-only" changeset containing the metadata prior to
        the resync, or `None` if the old values cannot be retrieved (typically
        when the repository is not cached).
        """
        # Non-cached repositories have nothing to resync.
        return None
    def get_quickjump_entries(self, rev):
        """Generate a list of interesting places in the repository.
        `rev` might be used to restrict the list of available locations,
        but in general it's best to produce all known locations.
        The generated results must be of the form (category, name, path, rev).
        """
        # No quickjump entries by default.
        return []
    def get_path_url(self, path, rev):
        """Return the repository URL for the given path and revision.
        The returned URL can be `None`, meaning that no URL has been specified
        for the repository, an absolute URL, or a scheme-relative URL starting
        with `//`, in which case the scheme of the request should be prepended.
        """
        # No URL mapping by default.
        return None
def get_changeset(self, rev):
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | true |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/versioncontrol/admin.py | trac/trac/versioncontrol/admin.py | # -*- coding: utf-8 -*-
#
# Copyright (C) 2008 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://trac.edgewall.com/license.html.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://trac.edgewall.org/.
import os.path
import sys
from genshi.builder import tag
from trac.admin import IAdminCommandProvider, IAdminPanelProvider
from trac.config import ListOption
from trac.core import *
from trac.perm import IPermissionRequestor
from trac.util import as_bool, is_path_below
from trac.util.compat import any
from trac.util.text import breakable_path, normalize_whitespace, print_table, \
printout
from trac.util.translation import _, ngettext, tag_
from trac.versioncontrol import DbRepositoryProvider, RepositoryManager, \
is_default
from trac.web.chrome import Chrome, add_notice, add_warning
class VersionControlAdmin(Component):
    """trac-admin command provider for version control administration."""
    implements(IAdminCommandProvider, IPermissionRequestor)
    # IAdminCommandProvider methods
    def get_admin_commands(self):
        # Each tuple is (command, usage hint, help text,
        # tab-completion callback, execution callback).
        yield ('changeset added', '<repos> <rev> [rev] [...]',
               """Notify trac about changesets added to a repository
               This command should be called from a post-commit hook. It will
               trigger a cache update and notify components about the addition.
               """,
               self._complete_repos, self._do_changeset_added)
        yield ('changeset modified', '<repos> <rev> [rev] [...]',
               """Notify trac about changesets modified in a repository
               This command should be called from a post-revprop hook after
               revision properties like the commit message, author or date
               have been changed. It will trigger a cache update for the given
               revisions and notify components about the change.
               """,
               self._complete_repos, self._do_changeset_modified)
        yield ('repository list', '',
               'List source repositories',
               None, self._do_list)
        yield ('repository resync', '<repos> [rev]',
               """Re-synchronize trac with repositories
               When [rev] is specified, only that revision is synchronized.
               Otherwise, the complete revision history is synchronized. Note
               that this operation can take a long time to complete.
               If synchronization gets interrupted, it can be resumed later
               using the `sync` command.
               To synchronize all repositories, specify "*" as the repository.
               """,
               self._complete_repos, self._do_resync)
        yield ('repository sync', '<repos> [rev]',
               """Resume synchronization of repositories
               It works like `resync`, except that it doesn't clear the already
               synchronized changesets, so it's a better way to resume an
               interrupted `resync`.
               See `resync` help for detailed usage.
               """,
               self._complete_repos, self._do_sync)
    def get_reponames(self):
        """Return all repository names, with the unnamed repository
        rendered as '(default)'."""
        rm = RepositoryManager(self.env)
        return [reponame or '(default)' for reponame
                in rm.get_all_repositories()]
    def _complete_repos(self, args):
        # Tab-completion: only the first argument is a repository name.
        if len(args) == 1:
            return self.get_reponames()
    def _do_changeset_added(self, reponame, *revs):
        """Handle `changeset added <repos> <rev> [...]`."""
        if is_default(reponame):
            reponame = ''
        rm = RepositoryManager(self.env)
        rm.notify('changeset_added', reponame, revs)
    def _do_changeset_modified(self, reponame, *revs):
        """Handle `changeset modified <repos> <rev> [...]`."""
        if is_default(reponame):
            reponame = ''
        rm = RepositoryManager(self.env)
        rm.notify('changeset_modified', reponame, revs)
    def _do_list(self):
        """Handle `repository list`: print a table of all repositories."""
        rm = RepositoryManager(self.env)
        values = []
        for (reponame, info) in sorted(rm.get_all_repositories().iteritems()):
            alias = ''
            if 'alias' in info:
                alias = info['alias'] or '(default)'
            values.append((reponame or '(default)', info.get('type', ''),
                           alias, info.get('dir', '')))
        print_table(values, [_('Name'), _('Type'), _('Alias'), _('Directory')])
    def _sync(self, reponame, rev, clean):
        """Shared implementation of the `resync` and `sync` commands.
        :param rev: optional single revision to (re)synchronize.
        :param clean: when `True`, clear the cache first (full resync).
        """
        rm = RepositoryManager(self.env)
        if reponame == '*':
            if rev is not None:
                raise TracError(_('Cannot synchronize a single revision '
                                  'on multiple repositories'))
            repositories = rm.get_real_repositories()
        else:
            if is_default(reponame):
                reponame = ''
            repos = rm.get_repository(reponame)
            if repos is None:
                raise TracError(_("Repository '%(repo)s' not found",
                                  repo=reponame or '(default)'))
            if rev is not None:
                # Single-revision resync: no cache statistics printed.
                repos.sync_changeset(rev)
                printout(_('%(rev)s resynced on %(reponame)s.', rev=rev,
                           reponame=repos.reponame or '(default)'))
                return
            repositories = [repos]
        for repos in sorted(repositories, key=lambda r: r.reponame):
            printout(_('Resyncing repository history for %(reponame)s... ',
                       reponame=repos.reponame or '(default)'))
            repos.sync(self._sync_feedback, clean=clean)
            for cnt, in self.env.db_query(
                    "SELECT count(rev) FROM revision WHERE repos=%s",
                    (repos.id,)):
                printout(ngettext('%(num)s revision cached.',
                                  '%(num)s revisions cached.', num=cnt))
        printout(_('Done.'))
    def _sync_feedback(self, rev):
        # Progress indicator: rewrite the same console line per revision.
        sys.stdout.write(' [%s]\r' % rev)
        sys.stdout.flush()
    def _do_resync(self, reponame, rev=None):
        """Handle `repository resync`: synchronize from a clean cache."""
        self._sync(reponame, rev, clean=True)
    def _do_sync(self, reponame, rev=None):
        """Handle `repository sync`: resume an interrupted resync."""
        self._sync(reponame, rev, clean=False)
    # IPermissionRequestor methods
    def get_permission_actions(self):
        # VERSIONCONTROL_ADMIN implies the browsing/viewing permissions.
        return [('VERSIONCONTROL_ADMIN', ['BROWSER_VIEW', 'CHANGESET_VIEW',
                                          'FILE_VIEW', 'LOG_VIEW'])]
class RepositoryAdminPanel(Component):
    """Web admin panel for repository administration."""
    implements(IAdminPanelProvider)
    allowed_repository_dir_prefixes = ListOption('versioncontrol',
        'allowed_repository_dir_prefixes', '',
        doc="""Comma-separated list of allowed prefixes for repository
        directories when adding and editing repositories in the repository
        admin panel. If the list is empty, all repository directories are
        allowed. (''since 0.12.1'')""")
    # IAdminPanelProvider methods
    def get_admin_panels(self, req):
        """Yield the admin panel entry for privileged users."""
        if 'VERSIONCONTROL_ADMIN' in req.perm:
            yield ('versioncontrol', _('Version Control'), 'repository',
                   _('Repositories'))
    def render_admin_panel(self, req, category, page, path_info):
        """Render the repository admin panel.
        Handles the list view (add/remove repositories and aliases) and
        the detail view for a single repository (`path_info` set);
        successful POST actions redirect back to the panel.
        """
        req.perm.require('VERSIONCONTROL_ADMIN')
        # Retrieve info for all repositories
        rm = RepositoryManager(self.env)
        all_repos = rm.get_all_repositories()
        db_provider = self.env[DbRepositoryProvider]
        if path_info:
            # Detail view
            reponame = path_info if not is_default(path_info) else ''
            info = all_repos.get(reponame)
            if info is None:
                raise TracError(_("Repository '%(repo)s' not found",
                                  repo=path_info))
            if req.method == 'POST':
                if req.args.get('cancel'):
                    req.redirect(req.href.admin(category, page))
                elif db_provider and req.args.get('save'):
                    # Modify repository
                    changes = {}
                    for field in db_provider.repository_attrs:
                        value = normalize_whitespace(req.args.get(field))
                        # 'hidden' is a checkbox: an absent value means
                        # unchecked, so it is recorded even when None.
                        if (value is not None or field == 'hidden') \
                                and value != info.get(field):
                            changes[field] = value
                    if 'dir' in changes \
                            and not self._check_dir(req, changes['dir']):
                        # Invalid directory: discard all pending changes.
                        changes = {}
                    if changes:
                        db_provider.modify_repository(reponame, changes)
                        add_notice(req, _('Your changes have been saved.'))
                        name = req.args.get('name')
                        resync = tag.tt('trac-admin $ENV repository resync "%s"'
                                        % (name or '(default)'))
                        if 'dir' in changes:
                            msg = tag_('You should now run %(resync)s to '
                                       'synchronize Trac with the repository.',
                                       resync=resync)
                            add_notice(req, msg)
                        elif 'type' in changes:
                            msg = tag_('You may have to run %(resync)s to '
                                       'synchronize Trac with the repository.',
                                       resync=resync)
                            add_notice(req, msg)
                        if name and name != path_info and not 'alias' in info:
                            cset_added = tag.tt('trac-admin $ENV changeset '
                                                'added "%s" $REV'
                                                % (name or '(default)'))
                            msg = tag_('You will need to update your post-commit '
                                       'hook to call %(cset_added)s with the new '
                                       'repository name.', cset_added=cset_added)
                            add_notice(req, msg)
                    if changes:
                        req.redirect(req.href.admin(category, page))
            Chrome(self.env).add_wiki_toolbars(req)
            data = {'view': 'detail', 'reponame': reponame}
        else:
            # List view
            if req.method == 'POST':
                # Add a repository
                if db_provider and req.args.get('add_repos'):
                    name = req.args.get('name')
                    type_ = req.args.get('type')
                    # Avoid errors when copy/pasting paths
                    dir = normalize_whitespace(req.args.get('dir', ''))
                    if name is None or type_ is None or not dir:
                        add_warning(req, _('Missing arguments to add a '
                                           'repository.'))
                    elif self._check_dir(req, dir):
                        db_provider.add_repository(name, dir, type_)
                        name = name or '(default)'
                        add_notice(req, _('The repository "%(name)s" has been '
                                          'added.', name=name))
                        resync = tag.tt('trac-admin $ENV repository resync '
                                        '"%s"' % name)
                        msg = tag_('You should now run %(resync)s to '
                                   'synchronize Trac with the repository.',
                                   resync=resync)
                        add_notice(req, msg)
                        cset_added = tag.tt('trac-admin $ENV changeset '
                                            'added "%s" $REV' % name)
                        msg = tag_('You should also set up a post-commit hook '
                                   'on the repository to call %(cset_added)s '
                                   'for each committed changeset.',
                                   cset_added=cset_added)
                        add_notice(req, msg)
                        req.redirect(req.href.admin(category, page))
                # Add a repository alias
                elif db_provider and req.args.get('add_alias'):
                    name = req.args.get('name')
                    alias = req.args.get('alias')
                    if name is not None and alias is not None:
                        db_provider.add_alias(name, alias)
                        add_notice(req, _('The alias "%(name)s" has been '
                                          'added.', name=name or '(default)'))
                        req.redirect(req.href.admin(category, page))
                    add_warning(req, _('Missing arguments to add an '
                                       'alias.'))
                # Refresh the list of repositories
                elif req.args.get('refresh'):
                    req.redirect(req.href.admin(category, page))
                # Remove repositories
                elif db_provider and req.args.get('remove'):
                    sel = req.args.getlist('sel')
                    if sel:
                        for name in sel:
                            db_provider.remove_repository(name)
                        add_notice(req, _('The selected repositories have '
                                          'been removed.'))
                        req.redirect(req.href.admin(category, page))
                    add_warning(req, _('No repositories were selected.'))
            data = {'view': 'list'}
        # Find repositories that are editable
        db_repos = {}
        if db_provider is not None:
            db_repos = dict(db_provider.get_repositories())
        # Prepare common rendering data
        repositories = dict((reponame, self._extend_info(reponame, info.copy(),
                                                         reponame in db_repos))
                            for (reponame, info) in all_repos.iteritems())
        types = sorted([''] + rm.get_supported_types())
        data.update({'types': types, 'default_type': rm.repository_type,
                     'repositories': repositories})
        return 'admin_repositories.html', data
    def _extend_info(self, reponame, info, editable):
        """Extend repository info for rendering."""
        info['name'] = reponame
        if info.get('dir') is not None:
            info['prettydir'] = breakable_path(info['dir']) or ''
        info['hidden'] = as_bool(info.get('hidden'))
        info['editable'] = editable
        if not info.get('alias'):
            # Best-effort lookup of the youngest revision for display;
            # any backend failure simply leaves the fields unset.
            try:
                repos = RepositoryManager(self.env).get_repository(reponame)
                youngest_rev = repos.get_youngest_rev()
                info['rev'] = youngest_rev
                info['display_rev'] = repos.display_rev(youngest_rev)
            except Exception:
                pass
        return info
    def _check_dir(self, req, dir):
        """Check that a repository directory is valid, and add a warning
        message if not.
        :return: `True` if the directory is acceptable, `False` otherwise.
        """
        if not os.path.isabs(dir):
            add_warning(req, _('The repository directory must be an absolute '
                               'path.'))
            return False
        prefixes = [os.path.join(self.env.path, prefix)
                    for prefix in self.allowed_repository_dir_prefixes]
        if prefixes and not any(is_path_below(dir, prefix)
                                for prefix in prefixes):
            add_warning(req, _('The repository directory must be located '
                               'below one of the following directories: '
                               '%(dirs)s', dirs=', '.join(prefixes)))
            return False
        return True
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/versioncontrol/diff.py | trac/trac/versioncontrol/diff.py | # -*- coding: utf-8 -*-
#
# Copyright (C) 2004-2009 Edgewall Software
# Copyright (C) 2004-2006 Christopher Lenz <cmlenz@gmx.de>
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://trac.edgewall.org/wiki/TracLicense.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://trac.edgewall.org/log/.
#
# Author: Christopher Lenz <cmlenz@gmx.de>
import difflib
import re
from genshi import Markup, escape
from trac.util.text import expandtabs
__all__ = ['diff_blocks', 'get_change_extent', 'get_diff_options',
'unified_diff']
def get_change_extent(str1, str2):
    """Determines the extent of differences between two strings.
    Returns a pair containing the offset at which the changes start,
    and the negative offset at which the changes end.
    If the two strings have neither a common prefix nor a common
    suffix, ``(0, 0)`` is returned.
    """
    shorter = min(len(str1), len(str2))
    # Length of the common prefix.
    front = 0
    while front < shorter and str1[front] == str2[front]:
        front += 1
    # Scan the common suffix backwards, never overlapping the prefix.
    back = -1
    budget = shorter - front
    while -back <= budget and str1[back] == str2[back]:
        back -= 1
    return (front, back + 1)
def get_filtered_hunks(fromlines, tolines, context=None,
                       ignore_blank_lines=False, ignore_case=False,
                       ignore_space_changes=False):
    """Retrieve differences in the form of `difflib.SequenceMatcher`
    opcodes, grouped according to the ``context`` and ``ignore_*``
    parameters.
    :param fromlines: list of lines corresponding to the old content
    :param tolines: list of lines corresponding to the new content
    :param ignore_blank_lines: differences about empty lines only are ignored
    :param ignore_case: upper case / lower case only differences are ignored
    :param ignore_space_changes: differences in amount of spaces are ignored
    :param context: the number of "equal" lines kept for representing
                    the context of the change
    :return: generator of grouped `difflib.SequenceMatcher` opcodes
    If none of the ``ignore_*`` parameters is `True`, there's nothing
    to filter out the results will come straight from the
    SequenceMatcher.
    """
    opcode_groups = get_hunks(fromlines, tolines, context)
    if not (ignore_blank_lines or ignore_case or ignore_space_changes):
        # Nothing to filter: pass the raw hunks through unchanged.
        return opcode_groups
    return filter_ignorable_lines(opcode_groups, fromlines, tolines, context,
                                  ignore_blank_lines, ignore_case,
                                  ignore_space_changes)
def get_hunks(fromlines, tolines, context=None):
    """Generator yielding grouped opcodes describing differences .
    See `get_filtered_hunks` for the parameter descriptions.
    """
    matcher = difflib.SequenceMatcher(None, fromlines, tolines)
    if context is not None:
        return matcher.get_grouped_opcodes(context)
    # No context restriction: one single "hunk" covering the whole opcode
    # list, wrapped in a generator so the return type stays uniform.
    return (hunk for hunk in [matcher.get_opcodes()])
def filter_ignorable_lines(hunks, fromlines, tolines, context,
                           ignore_blank_lines, ignore_case,
                           ignore_space_changes):
    """Detect line changes that should be ignored and emits them as
    tagged as "equal", possibly joined with the preceding and/or
    following "equal" block.
    See `get_filtered_hunks` for the parameter descriptions.
    """
    def is_ignorable(tag, fromlines, tolines):
        # A deletion or insertion is ignorable when it only involves
        # blank lines; a replacement is ignorable when each line pair
        # only differs by case and/or amount of whitespace, according
        # to the ignore_* flags. Falls through to None (falsy) for any
        # other combination.
        if tag == 'delete' and ignore_blank_lines:
            if ''.join(fromlines) == '':
                return True
        elif tag == 'insert' and ignore_blank_lines:
            if ''.join(tolines) == '':
                return True
        elif tag == 'replace' and (ignore_case or ignore_space_changes):
            if len(fromlines) != len(tolines):
                return False
            def normalize(line):
                if ignore_case:
                    line = line.lower()
                if ignore_space_changes:
                    line = ' '.join(line.split())
                return line
            for i in range(len(fromlines)):
                if normalize(fromlines[i]) != normalize(tolines[i]):
                    return False
            return True
    hunks = list(hunks)
    opcodes = []
    ignored_lines = False
    prev = None
    # First pass: retag ignorable changes as "equal" and merge them with
    # any adjacent "equal" opcodes (so "equal" opcodes never end up
    # adjacent in `opcodes`).
    for hunk in hunks:
        for tag, i1, i2, j1, j2 in hunk:
            if tag == 'equal':
                if prev:
                    prev = (tag, prev[1], i2, prev[3], j2)
                else:
                    prev = (tag, i1, i2, j1, j2)
            else:
                if is_ignorable(tag, fromlines[i1:i2], tolines[j1:j2]):
                    ignored_lines = True
                    if prev:
                        prev = 'equal', prev[1], i2, prev[3], j2
                    else:
                        prev = 'equal', i1, i2, j1, j2
                    continue
                if prev:
                    opcodes.append(prev)
                opcodes.append((tag, i1, i2, j1, j2))
                prev = None
    if prev:
        opcodes.append(prev)
    if ignored_lines:
        if context is None:
            yield opcodes
        else:
            # we leave at most n lines with the tag 'equal' before and after
            # every change
            n = context
            nn = n + n
            group = []
            def all_equal():
                # Bug fix: the original was missing this `return`, so the
                # helper always returned None and the "skip all-equal
                # groups" checks below could never trigger.
                return all(op[0] == 'equal' for op in group)
            for idx, (tag, i1, i2, j1, j2) in enumerate(opcodes):
                if idx == 0 and tag == 'equal': # Fixup leading unchanged block
                    i1, j1 = max(i1, i2 - n), max(j1, j2 - n)
                elif tag == 'equal' and i2 - i1 > nn:
                    # Split a long unchanged stretch: keep its head with
                    # the previous group, its tail with the next one.
                    group.append((tag, i1, min(i2, i1 + n), j1,
                                  min(j2, j1 + n)))
                    if not all_equal():
                        yield group
                    group = []
                    i1, j1 = max(i1, i2 - n), max(j1, j2 - n)
                group.append((tag, i1, i2, j1, j2))
            if group and not (len(group) == 1 and group[0][0] == 'equal'):
                if group[-1][0] == 'equal': # Fixup trailing unchanged block
                    tag, i1, i2, j1, j2 = group[-1]
                    group[-1] = tag, i1, min(i2, i1 + n), j1, min(j2, j1 + n)
                if not all_equal():
                    yield group
    else:
        # Nothing was ignored: pass the original hunks through untouched.
        for hunk in hunks:
            yield hunk
def hdf_diff(*args, **kwargs):
    """:deprecated: use `diff_blocks` (will be removed in 1.1.1)"""
    # Kept as a thin alias for backward compatibility.
    return diff_blocks(*args, **kwargs)
def diff_blocks(fromlines, tolines, context=None, tabwidth=8,
                ignore_blank_lines=0, ignore_case=0, ignore_space_changes=0):
    """Return an array that is adequate for adding to the data dictionary
    See `get_filtered_hunks` for the parameter descriptions.
    See also the diff_div.html template.
    """
    # Map SequenceMatcher opcode tags to the CSS class names used by the
    # diff templates.
    type_map = {'replace': 'mod', 'delete': 'rem', 'insert': 'add',
                'equal': 'unmod'}
    space_re = re.compile(' ( +)|^ ')
    def htmlify(match):
        # Replace runs of spaces with non-breaking-space pairs so they
        # survive HTML whitespace collapsing.
        div, mod = divmod(len(match.group(0)), 2)
        return div * '  ' + mod * ' '
    def markup_intraline_changes(opcodes):
        # For 'replace' opcodes of equal line count, bracket the changed
        # extent of each line pair with \0 ... \1 markers; they are later
        # turned into <del>/<ins> tags. NOTE: mutates fromlines/tolines
        # in place.
        for tag, i1, i2, j1, j2 in opcodes:
            if tag == 'replace' and i2 - i1 == j2 - j1:
                for i in range(i2 - i1):
                    fromline, toline = fromlines[i1 + i], tolines[j1 + i]
                    (start, end) = get_change_extent(fromline, toline)
                    if start != 0 or end != 0:
                        last = end + len(fromline)
                        fromlines[i1 + i] = (
                            fromline[:start] + '\0' + fromline[start:last] +
                            '\1' + fromline[last:])
                        last = end+len(toline)
                        tolines[j1 + i] = (
                            toline[:start] + '\0' + toline[start:last] +
                            '\1' + toline[last:])
            yield tag, i1, i2, j1, j2
    changes = []
    for group in get_filtered_hunks(fromlines, tolines, context,
                                    ignore_blank_lines, ignore_case,
                                    ignore_space_changes):
        blocks = []
        last_tag = None
        for tag, i1, i2, j1, j2 in markup_intraline_changes(group):
            # Start a new rendering block whenever the opcode tag changes.
            if tag != last_tag:
                blocks.append({'type': type_map[tag],
                               'base': {'offset': i1, 'lines': []},
                               'changed': {'offset': j1, 'lines': []}})
            if tag == 'equal':
                for line in fromlines[i1:i2]:
                    line = line.expandtabs(tabwidth)
                    line = space_re.sub(htmlify, escape(line, quotes=False))
                    blocks[-1]['base']['lines'].append(Markup(unicode(line)))
                for line in tolines[j1:j2]:
                    line = line.expandtabs(tabwidth)
                    line = space_re.sub(htmlify, escape(line, quotes=False))
                    blocks[-1]['changed']['lines'].append(Markup(unicode(line)))
            else:
                if tag in ('replace', 'delete'):
                    for line in fromlines[i1:i2]:
                        # Expand tabs while preserving the \0/\1 markers,
                        # then convert the markers into <del> tags.
                        line = expandtabs(line, tabwidth, '\0\1')
                        line = escape(line, quotes=False)
                        line = '<del>'.join([space_re.sub(htmlify, seg)
                                             for seg in line.split('\0')])
                        line = line.replace('\1', '</del>')
                        blocks[-1]['base']['lines'].append(
                            Markup(unicode(line)))
                if tag in ('replace', 'insert'):
                    for line in tolines[j1:j2]:
                        line = expandtabs(line, tabwidth, '\0\1')
                        line = escape(line, quotes=False)
                        line = '<ins>'.join([space_re.sub(htmlify, seg)
                                             for seg in line.split('\0')])
                        line = line.replace('\1', '</ins>')
                        blocks[-1]['changed']['lines'].append(
                            Markup(unicode(line)))
        changes.append(blocks)
    return changes
def unified_diff(fromlines, tolines, context=None, ignore_blank_lines=0,
                 ignore_case=0, ignore_space_changes=0):
    """Generator producing lines corresponding to a textual diff.
    See `get_filtered_hunks` for the parameter descriptions.
    """
    for group in get_filtered_hunks(fromlines, tolines, context,
                                    ignore_blank_lines, ignore_case,
                                    ignore_space_changes):
        # Hunk header spans from the first opcode's start to the last
        # opcode's end.
        first, last = group[0], group[-1]
        i1, i2, j1, j2 = first[1], last[2], first[3], last[4]
        if i1 == 0 and i2 == 0:
            i1 = i2 = -1 # support for 'A'dd changes
        yield '@@ -%d,%d +%d,%d @@' % (i1 + 1, i2 - i1, j1 + 1, j2 - j1)
        for tag, i1, i2, j1, j2 in group:
            if tag == 'equal':
                for line in fromlines[i1:i2]:
                    yield ' ' + line
                continue
            if tag in ('replace', 'delete'):
                for line in fromlines[i1:i2]:
                    yield '-' + line
            if tag in ('replace', 'insert'):
                for line in tolines[j1:j2]:
                    yield '+' + line
def get_diff_options(req):
    """Retrieve user preferences for diffs.
    :return: ``(style, options, data)`` triple.
      ``style``
        can be ``'inline'`` or ``'sidebyside'``,
      ``options``
        a sequence of "diff" flags,
      ``data``
        the style and options information represented as
        key/value pairs in dictionaries, for example::
          {'style': u'sidebyside',
           'options': {'contextall': 1, 'contextlines': 2,
                       'ignorecase': 0, 'ignoreblanklines': 0,
                       'ignorewhitespace': 1}}
    """
    options_data = {}
    data = {'options': options_data}
    def get_bool_option(name, default=0):
        # The session preference wins unless the request explicitly asks
        # for an update with a different value.
        pref = int(req.session.get('diff_' + name, default))
        arg = int(name in req.args)
        if 'update' in req.args and arg != pref:
            req.session.set('diff_' + name, arg, default)
        else:
            arg = pref
        return arg
    # Diff style ('inline' or 'sidebyside'), persisted in the session.
    pref = req.session.get('diff_style', 'inline')
    style = req.args.get('style', pref)
    if 'update' in req.args and style != pref:
        req.session.set('diff_style', style, 'inline')
    data['style'] = style
    # Number of context lines; -1 flags an unparsable request value.
    pref = int(req.session.get('diff_contextlines', 2))
    try:
        context = int(req.args.get('contextlines', pref))
    except ValueError:
        context = -1
    if 'update' in req.args and context != pref:
        req.session.set('diff_contextlines', context, 2)
    options_data['contextlines'] = context
    arg = int(req.args.get('contextall', 0))
    options_data['contextall'] = arg
    # Build the diff-style flag list; -U-1 means "all context".
    options = ['-U%d' % (-1 if arg else context)]
    arg = get_bool_option('ignoreblanklines')
    if arg:
        options.append('-B')
    options_data['ignoreblanklines'] = arg
    arg = get_bool_option('ignorecase')
    if arg:
        options.append('-i')
    options_data['ignorecase'] = arg
    arg = get_bool_option('ignorewhitespace')
    if arg:
        options.append('-b')
    options_data['ignorewhitespace'] = arg
    return (style, options, data)
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/versioncontrol/svn_fs.py | trac/trac/versioncontrol/svn_fs.py | # -*- coding: utf-8 -*-
#
# Copyright (C) 2012 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://trac.edgewall.org/wiki/TracLicense.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://trac.edgewall.org/log/.
from trac.util import import_namespace
# Re-export every public name from the new module location so that
# existing ``trac.versioncontrol.svn_fs`` imports keep working.
import_namespace(globals(), 'tracopt.versioncontrol.svn.svn_fs')

# This module is a stub provided for backward compatibility. The svn_fs
# module has been moved to tracopt.versioncontrol.svn. Please update your
# code to use the new location.
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/versioncontrol/__init__.py | trac/trac/versioncontrol/__init__.py | from trac.versioncontrol.api import *
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/versioncontrol/cache.py | trac/trac/versioncontrol/cache.py | # -*- coding: utf-8 -*-
#
# Copyright (C) 2005-2009 Edgewall Software
# Copyright (C) 2005 Christopher Lenz <cmlenz@gmx.de>
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://trac.edgewall.org/wiki/TracLicense.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://trac.edgewall.org/log/.
#
# Author: Christopher Lenz <cmlenz@gmx.de>
from __future__ import with_statement
import os
from trac.cache import cached
from trac.core import TracError
from trac.util.datefmt import from_utimestamp, to_utimestamp
from trac.util.translation import _
from trac.versioncontrol import Changeset, Node, Repository, NoSuchChangeset
# Single-character codes used to store node kinds in the database.
_kindmap = {'D': Node.DIRECTORY, 'F': Node.FILE}
# Single-character codes used to store changeset actions in the database.
_actionmap = {'A': Changeset.ADD, 'C': Changeset.COPY,
              'D': Changeset.DELETE, 'E': Changeset.EDIT,
              'M': Changeset.MOVE}
def _invert_dict(d):
return dict(zip(d.values(), d.keys()))
# Reverse lookups: map the Node/Changeset constants back to their
# single-character database codes.
_inverted_kindmap = _invert_dict(_kindmap)
_inverted_actionmap = _invert_dict(_actionmap)

# Names of the per-repository metadata entries kept in the `repository`
# table alongside the cached revisions.
CACHE_REPOSITORY_DIR = 'repository_dir'
CACHE_YOUNGEST_REV = 'youngest_rev'
CACHE_METADATA_KEYS = (CACHE_REPOSITORY_DIR, CACHE_YOUNGEST_REV)
class CachedRepository(Repository):
    """Caching wrapper around a version control repository.

    Changeset metadata and node changes are mirrored into the `revision`
    and `node_change` database tables, so most read operations can be
    answered from the database instead of the (usually slower) wrapped
    repository `repos`.
    """

    # Subclasses set this to True when revisions form a linear sequence of
    # ints, enabling the SQL-based previous/next navigation below.
    has_linear_changesets = False

    scope = property(lambda self: self.repos.scope)

    def __init__(self, env, repos, log):
        self.env = env
        self.repos = repos
        # key for the cached `metadata` attribute, unique per repository
        self._metadata_id = str(self.repos.id)
        Repository.__init__(self, repos.name, repos.params, log)

    def close(self):
        self.repos.close()

    def get_base(self):
        return self.repos.get_base()

    def get_quickjump_entries(self, rev):
        return self.repos.get_quickjump_entries(self.normalize_rev(rev))

    def get_path_url(self, path, rev):
        return self.repos.get_path_url(path, rev)

    def get_changeset(self, rev):
        # served from the cache tables through CachedChangeset
        return CachedChangeset(self, self.normalize_rev(rev), self.env)

    def get_changeset_uid(self, rev):
        return self.repos.get_changeset_uid(rev)

    def get_changesets(self, start, stop):
        """Generate cached changesets with `start` <= date < `stop`,
        most recent first.
        """
        for rev, in self.env.db_query("""
                SELECT rev FROM revision
                WHERE repos=%s AND time >= %s AND time < %s
                ORDER BY time DESC, rev DESC
                """, (self.id, to_utimestamp(start), to_utimestamp(stop))):
            try:
                yield self.get_changeset(rev)
            except NoSuchChangeset:
                pass # skip changesets currently being resync'ed

    def sync_changeset(self, rev):
        """Refresh the cached metadata of changeset `rev`.

        :return: the previously cached `Changeset`, or `None` if the
                 changeset was not cached yet.
        """
        cset = self.repos.get_changeset(rev)
        srev = self.db_rev(cset.rev)
        old_cset = None
        with self.env.db_transaction as db:
            for time, author, message in db("""
                    SELECT time, author, message FROM revision
                    WHERE repos=%s AND rev=%s
                    """, (self.id, srev)):
                old_cset = Changeset(self.repos, cset.rev, message, author,
                                     from_utimestamp(time))
            if old_cset:
                db("""UPDATE revision SET time=%s, author=%s, message=%s
                      WHERE repos=%s AND rev=%s
                      """, (to_utimestamp(cset.date), cset.author,
                            cset.message, self.id, srev))
            else:
                self._insert_changeset(db, rev, cset)
        return old_cset

    @cached('_metadata_id')
    def metadata(self):
        """Retrieve data for the cached `metadata` attribute."""
        return dict(self.env.db_query("""
                SELECT name, value FROM repository
                WHERE id=%%s AND name IN (%s)
                """ % ','.join(['%s'] * len(CACHE_METADATA_KEYS)),
                (self.id,) + CACHE_METADATA_KEYS))

    def sync(self, feedback=None, clean=False):
        """Bring the cache tables up to date with the wrapped repository.

        :param feedback: optional callable invoked with each newly cached
                         revision
        :param clean: when `True`, wipe the cache first and resync from
                      scratch
        """
        if clean:
            self.log.info("Cleaning cache")
            with self.env.db_transaction as db:
                db("DELETE FROM revision WHERE repos=%s",
                   (self.id,))
                db("DELETE FROM node_change WHERE repos=%s",
                   (self.id,))
                db.executemany("DELETE FROM repository WHERE id=%s AND name=%s",
                               [(self.id, k) for k in CACHE_METADATA_KEYS])
                db.executemany("""
                    INSERT INTO repository (id, name, value)
                    VALUES (%s, %s, %s)
                    """, [(self.id, k, '') for k in CACHE_METADATA_KEYS])
                del self.metadata
        metadata = self.metadata
        with self.env.db_transaction as db:
            invalidate = False
            # -- check that we're populating the cache for the correct
            #    repository
            repository_dir = metadata.get(CACHE_REPOSITORY_DIR)
            if repository_dir:
                # directory part of the repo name can vary on case insensitive
                # fs
                if os.path.normcase(repository_dir) \
                        != os.path.normcase(self.name):
                    self.log.info("'repository_dir' has changed from %r to %r",
                                  repository_dir, self.name)
                    raise TracError(_("The repository directory has changed, "
                                      "you should resynchronize the "
                                      "repository with: trac-admin $ENV "
                                      "repository resync '%(reponame)s'",
                                      reponame=self.reponame or '(default)'))
            elif repository_dir is None:  # first time the cache is populated
                self.log.info('Storing initial "repository_dir": %s',
                              self.name)
                db("""INSERT INTO repository (id, name, value)
                      VALUES (%s, %s, %s)
                      """, (self.id, CACHE_REPOSITORY_DIR, self.name))
                invalidate = True
            else: # 'repository_dir' cleared by a resync
                self.log.info('Resetting "repository_dir": %s', self.name)
                db("UPDATE repository SET value=%s WHERE id=%s AND name=%s",
                   (self.name, self.id, CACHE_REPOSITORY_DIR))
                invalidate = True
            # -- insert a 'youngeset_rev' for the repository if necessary
            if metadata.get(CACHE_YOUNGEST_REV) is None:
                db("""INSERT INTO repository (id, name, value)
                      VALUES (%s, %s, %s)
                      """, (self.id, CACHE_YOUNGEST_REV, ''))
                invalidate = True
            if invalidate:
                del self.metadata
        # -- retrieve the youngest revision in the repository and the youngest
        #    revision cached so far
        self.repos.clear()
        repos_youngest = self.repos.youngest_rev
        youngest = metadata.get(CACHE_YOUNGEST_REV)
        # -- verify and normalize youngest revision
        if youngest:
            youngest = self.repos.normalize_rev(youngest)
            if not youngest:
                self.log.debug('normalize_rev failed (youngest_rev=%r)',
                               self.youngest_rev)
        else:
            self.log.debug('cache metadata undefined (youngest_rev=%r)',
                           self.youngest_rev)
            youngest = None
        # -- compare them and try to resync if different
        next_youngest = None
        if youngest != repos_youngest:
            self.log.info("repos rev [%s] != cached rev [%s]",
                          repos_youngest, youngest)
            if youngest:
                next_youngest = self.repos.next_rev(youngest)
            else:
                try:
                    next_youngest = self.repos.oldest_rev
                    # Ugly hack needed because doing that everytime in
                    # oldest_rev suffers from horrendeous performance (#5213)
                    if self.repos.scope != '/' and not \
                            self.repos.has_node('/', next_youngest):
                        next_youngest = self.repos.next_rev(next_youngest,
                                find_initial_rev=True)
                    next_youngest = self.repos.normalize_rev(next_youngest)
                except TracError:
                    # can't normalize oldest_rev: repository was empty
                    return
            if next_youngest is None: # nothing to cache yet
                return
            srev = self.db_rev(next_youngest)
            # 0. first check if there's no (obvious) resync in progress
            with self.env.db_query as db:
                for rev, in db(
                        "SELECT rev FROM revision WHERE repos=%s AND rev=%s",
                        (self.id, srev)):
                    # already there, but in progress, so keep ''previous''
                    # notion of 'youngest'
                    self.repos.clear(youngest_rev=youngest)
                    return
            # prepare for resyncing (there might still be a race
            # condition at this point)
            while next_youngest is not None:
                srev = self.db_rev(next_youngest)
                with self.env.db_transaction as db:
                    self.log.info("Trying to sync revision [%s]",
                                  next_youngest)
                    cset = self.repos.get_changeset(next_youngest)
                    try:
                        # steps 1. and 2.
                        self._insert_changeset(db, next_youngest, cset)
                    except Exception, e: # *another* 1.1. resync attempt won
                        self.log.warning('Revision %s already cached: %r',
                                         next_youngest, e)
                        # the other resync attempts is also
                        # potentially still in progress, so for our
                        # process/thread, keep ''previous'' notion of
                        # 'youngest'
                        self.repos.clear(youngest_rev=youngest)
                        # FIXME: This aborts a containing transaction
                        db.rollback()
                        return
                    # 3. update 'youngest_rev' metadata (minimize
                    #    possibility of failures at point 0.)
                    db("""
                        UPDATE repository SET value=%s WHERE id=%s AND name=%s
                        """, (str(next_youngest), self.id, CACHE_YOUNGEST_REV))
                    del self.metadata
                # 4. iterate (1. should always succeed now)
                youngest = next_youngest
                next_youngest = self.repos.next_rev(next_youngest)
                # 5. provide some feedback
                if feedback:
                    feedback(youngest)

    def _insert_changeset(self, db, rev, cset):
        """Insert the changeset `cset` and its node changes into the cache
        tables, within the transaction `db`.
        """
        srev = self.db_rev(rev)
        # 1. Attempt to resync the 'revision' table. In case of
        # concurrent syncs, only such insert into the `revision` table
        # will succeed, the others will fail and raise an exception.
        db("""
            INSERT INTO revision (repos,rev,time,author,message)
            VALUES (%s,%s,%s,%s,%s)
            """, (self.id, srev, to_utimestamp(cset.date),
                  cset.author, cset.message))
        # 2. now *only* one process was able to get there (i.e. there
        # *shouldn't* be any race condition here)
        for path, kind, action, bpath, brev in cset.get_changes():
            self.log.debug("Caching node change in [%s]: %r", rev,
                           (path, kind, action, bpath, brev))
            kind = _inverted_kindmap[kind]
            action = _inverted_actionmap[action]
            db("""
                INSERT INTO node_change
                    (repos,rev,path,node_type,change_type,base_path,
                     base_rev)
                VALUES (%s,%s,%s,%s,%s,%s,%s)
                """, (self.id, srev, path, kind, action, bpath, brev))

    def get_node(self, path, rev=None):
        return self.repos.get_node(path, self.normalize_rev(rev))

    def _get_node_revs(self, path, last=None, first=None):
        """Return the revisions affecting `path` between `first` and `last`
        revisions.
        """
        last = self.normalize_rev(last)
        slast = self.db_rev(last)
        node = self.get_node(path, last)    # Check node existence
        with self.env.db_query as db:
            if first is None:
                # default to the last add/copy/move of the node before `last`
                first = db("""
                    SELECT rev FROM node_change
                    WHERE repos=%s AND rev<=%s AND path=%s
                      AND change_type IN ('A', 'C', 'M')
                    ORDER BY rev DESC LIMIT 1
                    """, (self.id, slast, path))
                first = int(first[0][0]) if first else 0
            sfirst = self.db_rev(first)
            # changes on the path itself or on any of its descendants
            return [int(rev) for rev, in db("""
                    SELECT DISTINCT rev FROM node_change
                    WHERE repos=%%s AND rev>=%%s AND rev<=%%s
                      AND (path=%%s OR path %s)""" % db.like(),
                    (self.id, sfirst, slast, path,
                     db.like_escape(path + '/') + '%'))]

    def has_node(self, path, rev=None):
        return self.repos.has_node(path, self.normalize_rev(rev))

    def get_oldest_rev(self):
        return self.repos.oldest_rev

    def get_youngest_rev(self):
        # answered from cache metadata, not from the wrapped repository
        return self.rev_db(self.metadata.get(CACHE_YOUNGEST_REV))

    def previous_rev(self, rev, path=''):
        if self.has_linear_changesets:
            return self._next_prev_rev('<', rev, path)
        else:
            return self.repos.previous_rev(self.normalize_rev(rev), path)

    def next_rev(self, rev, path=''):
        if self.has_linear_changesets:
            return self._next_prev_rev('>', rev, path)
        else:
            return self.repos.next_rev(self.normalize_rev(rev), path)

    def _next_prev_rev(self, direction, rev, path=''):
        """Find the nearest cached revision before ('<') or after ('>')
        `rev` touching `path`, or `None` if there is none.
        """
        srev = self.db_rev(rev)
        with self.env.db_query as db:
            # the changeset revs are sequence of ints:
            sql = "SELECT rev FROM node_change WHERE repos=%s AND " + \
                  "rev" + direction + "%s"
            args = [self.id, srev]
            if path:
                path = path.lstrip('/')
                # changes on path itself or its children
                sql += " AND (path=%s OR path " + db.like()
                args.extend((path, db.like_escape(path + '/') + '%'))
                # deletion of path ancestors
                components = path.lstrip('/').split('/')
                parents = ','.join(('%s',) * len(components))
                sql += " OR (path IN (" + parents + ") AND change_type='D'))"
                for i in range(1, len(components) + 1):
                    args.append('/'.join(components[:i]))
            sql += " ORDER BY rev" + (" DESC" if direction == '<' else "") \
                   + " LIMIT 1"
            for rev, in db(sql, args):
                return int(rev)

    def rev_older_than(self, rev1, rev2):
        return self.repos.rev_older_than(self.normalize_rev(rev1),
                                         self.normalize_rev(rev2))

    def get_path_history(self, path, rev=None, limit=None):
        return self.repos.get_path_history(path, self.normalize_rev(rev),
                                           limit)

    def normalize_path(self, path):
        return self.repos.normalize_path(path)

    def normalize_rev(self, rev):
        """Normalize `rev` against the cached youngest revision.

        :raise NoSuchChangeset: when `rev` is not an int within range and
                                not one of the 'youngest' synonyms
        """
        if rev is None or isinstance(rev, basestring) and \
               rev.lower() in ('', 'head', 'latest', 'youngest'):
            return self.rev_db(self.youngest_rev or 0)
        else:
            try:
                rev = int(rev)
                if rev <= self.youngest_rev:
                    return rev
            except (ValueError, TypeError):
                pass
            raise NoSuchChangeset(rev)

    def db_rev(self, rev):
        """Convert a revision to its representation in the database."""
        return str(rev)

    def rev_db(self, rev):
        """Convert a revision from its representation in the database."""
        return rev

    def get_changes(self, old_path, old_rev, new_path, new_rev,
                    ignore_ancestry=1):
        return self.repos.get_changes(old_path, self.normalize_rev(old_rev),
                                      new_path, self.normalize_rev(new_rev),
                                      ignore_ancestry)
class CachedChangeset(Changeset):
    """Changeset whose metadata is read from the cache tables rather than
    from the version control system itself.
    """

    def __init__(self, repos, rev, env):
        self.env = env
        # for/else: the loop body runs at most once (rev is unique per repos)
        for _date, author, message in self.env.db_query("""
                SELECT time, author, message FROM revision
                WHERE repos=%s AND rev=%s
                """, (repos.id, repos.db_rev(rev))):
            date = from_utimestamp(_date)
            Changeset.__init__(self, repos, repos.rev_db(rev), message, author,
                               date)
            break
        else:
            # no matching row: the changeset is not (yet) cached
            raise NoSuchChangeset(rev)

    def get_changes(self):
        """Generate `(path, kind, change, base_path, base_rev)` tuples from
        the `node_change` cache table, ordered by path.
        """
        for path, kind, change, base_path, base_rev in sorted(
                self.env.db_query("""
                SELECT path, node_type, change_type, base_path, base_rev
                FROM node_change WHERE repos=%s AND rev=%s
                ORDER BY path
                """, (self.repos.id, self.repos.db_rev(self.rev)))):
            # translate single-character DB codes back to API constants
            kind = _kindmap[kind]
            change = _actionmap[change]
            yield path, kind, change, base_path, self.repos.rev_db(base_rev)

    def get_properties(self):
        # properties are not cached; delegate to the real repository
        return self.repos.repos.get_changeset(self.rev).get_properties()
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/versioncontrol/tests/svn_authz.py | trac/trac/versioncontrol/tests/svn_authz.py | # -*- coding: utf-8 -*-
#
# Copyright (C) 2010 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://trac.edgewall.org/wiki/TracLicense.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://trac.edgewall.org/log/.
import os.path
import tempfile
import unittest
from trac.resource import Resource
from trac.test import EnvironmentStub, Mock
from trac.util import create_file
from trac.versioncontrol.api import RepositoryManager
from trac.versioncontrol.svn_authz import AuthzSourcePolicy, ParseError, \
parse
class AuthzParserTestCase(unittest.TestCase):
    """Tests for the svn authz file parser."""

    def test_parse_file(self):
        """Groups, aliases, wildcards and per-module sections are parsed;
        sections for modules not in the given set are skipped.
        """
        authz = parse("""\
[groups]
developers = foo, bar
users = @developers, &baz
[aliases]
baz = CN=Hàröld Hacker,OU=Enginéers,DC=red-bean,DC=com
# Applies to all repositories
[/]
* = r
[/trunk]
@developers = rw
&baz =
@users = r
[/branches]
bar = rw
; Applies only to module
[module:/trunk]
foo = rw
&baz = r
; Unicode module names
[module:/c/résumé]
bar = rw
; Unused module, not parsed
[unused:/some/path]
foo = r
""", set(['', 'module']))
        # groups are flattened into individual users, aliases resolved
        self.assertEqual({
            '': {
                '/': {
                    '*': True,
                },
                '/trunk': {
                    'foo': True,
                    'bar': True,
                    u'CN=Hàröld Hacker,OU=Enginéers,DC=red-bean,DC=com': False,
                },
                '/branches': {
                    'bar': True,
                },
            },
            'module': {
                '/trunk': {
                    'foo': True,
                    u'CN=Hàröld Hacker,OU=Enginéers,DC=red-bean,DC=com': True,
                },
                u'/c/résumé': {
                    'bar': True,
                },
            },
        }, authz)

    def test_parse_errors(self):
        """A rule before any section and a rule without '=' both raise
        `ParseError`.
        """
        self.assertRaises(ParseError, parse, """\
user = r
[module:/trunk]
user = r
""", set(['', 'module']))
        self.assertRaises(ParseError, parse, """\
[module:/trunk]
user
""", set(['', 'module']))
class AuthzSourcePolicyTestCase(unittest.TestCase):
    """Tests for `AuthzSourcePolicy`, the fine-grained permission policy
    driven by an svn authz file.
    """

    def setUp(self):
        # write the authz fixture used by most tests to a temporary file
        tmpdir = os.path.realpath(tempfile.gettempdir())
        self.authz = os.path.join(tmpdir, 'trac-authz')
        create_file(self.authz, """\
[groups]
group1 = user
group2 = @group1
cycle1 = @cycle2
cycle2 = @cycle3
cycle3 = @cycle1, user
alias1 = &jekyll
alias2 = @alias1
[aliases]
jekyll = Mr Hyde
# Read / write permissions
[/readonly]
user = r
[/writeonly]
user = w
[/readwrite]
user = rw
[/empty]
user =
# Trailing slashes
[/trailing_a]
user = r
[/trailing_b/]
user = r
# Sub-paths
[/sub/path]
user = r
# Module usage
[module:/module_a]
user = r
[other:/module_b]
user = r
[/module_c]
user = r
[module:/module_d]
user =
[/module_d]
user = r
# Wildcards
[/wildcard]
* = r
# Special tokens
[/special/anonymous]
$anonymous = r
[/special/authenticated]
$authenticated = r
# Groups
[/groups_a]
@group1 = r
[/groups_b]
@group2 = r
[/cyclic]
@cycle1 = r
# Precedence
[module:/precedence_a]
user =
[/precedence_a]
user = r
[/precedence_b]
user = r
[/precedence_b/sub]
user =
[/precedence_b/sub/test]
user = r
[/precedence_c]
user =
@group1 = r
[/precedence_d]
@group1 = r
user =
# Aliases
[/aliases_a]
&jekyll = r
[/aliases_b]
@alias2 = r
# Scoped repository
[scoped:/scope/dir1]
joe = r
[scoped:/scope/dir2]
jane = r
""")
        self.env = EnvironmentStub(enable=[AuthzSourcePolicy])
        self.env.config.set('trac', 'authz_file', self.authz)
        self.policy = AuthzSourcePolicy(self.env)

        # Monkey-subclass RepositoryManager to serve mock repositories
        rm = RepositoryManager(self.env)

        class TestRepositoryManager(rm.__class__):
            def get_real_repositories(self):
                return set([Mock(reponame='module'),
                            Mock(reponame='other'),
                            Mock(reponame='scoped')])

            def get_repository(self, reponame):
                if reponame == 'scoped':
                    def get_changeset(rev):
                        # revs 123/456 touch dir1/dir2; any other rev is empty
                        if rev == 123:
                            def get_changes():
                                yield ('/dir1/file',)
                        elif rev == 456:
                            def get_changes():
                                yield ('/dir2/file',)
                        else:
                            def get_changes():
                                return iter([])
                        return Mock(get_changes=get_changes)
                    return Mock(scope='/scope',
                                get_changeset=get_changeset)
                return Mock(scope='/')

        rm.__class__ = TestRepositoryManager

    def tearDown(self):
        self.env.reset_db()
        os.remove(self.authz)

    def assertPathPerm(self, result, user, reponame=None, path=None):
        """Assert that `user` is granted access `result` to `path` within
        the repository `reponame`.
        """
        resource = None
        if reponame is not None:
            resource = Resource('source', path,
                                parent=Resource('repository', reponame))
        for perm in ('BROWSER_VIEW', 'FILE_VIEW', 'LOG_VIEW'):
            check = self.policy.check_permission(perm, user, resource, None)
            self.assertEqual(result, check)

    def assertRevPerm(self, result, user, reponame=None, rev=None):
        """Assert that `user` is granted access `result` to `rev` within
        the repository `reponame`.
        """
        resource = None
        if reponame is not None:
            resource = Resource('changeset', rev,
                                parent=Resource('repository', reponame))
        check = self.policy.check_permission('CHANGESET_VIEW', user, resource,
                                             None)
        self.assertEqual(result, check)

    def test_coarse_permissions(self):
        """Repository-level access is granted if at least one fine-grained
        rule could grant access for the user.
        """
        # Granted to all due to wildcard
        self.assertPathPerm(True, 'unknown')
        self.assertPathPerm(True, 'joe')
        self.assertRevPerm(True, 'unknown')
        self.assertRevPerm(True, 'joe')
        # Granted if at least one fine permission is granted
        self.policy._mtime = 0
        create_file(self.authz, """\
[/somepath]
joe = r
denied =
[module:/otherpath]
jane = r
$anonymous = r
[inactive:/not-in-this-instance]
unknown = r
""")
        self.assertPathPerm(None, 'unknown')
        self.assertRevPerm(None, 'unknown')
        self.assertPathPerm(None, 'denied')
        self.assertRevPerm(None, 'denied')
        self.assertPathPerm(True, 'joe')
        self.assertRevPerm(True, 'joe')
        self.assertPathPerm(True, 'jane')
        self.assertRevPerm(True, 'jane')
        self.assertPathPerm(True, 'anonymous')
        self.assertRevPerm(True, 'anonymous')

    def test_default_permission(self):
        # By default, permissions are undecided
        self.assertPathPerm(None, 'joe', '', '/not_defined')
        self.assertPathPerm(None, 'jane', 'repo', '/not/defined/either')

    def test_read_write(self):
        # Allow 'r' and 'rw' entries, deny 'w' and empty entries
        self.assertPathPerm(True, 'user', '', '/readonly')
        self.assertPathPerm(True, 'user', '', '/readwrite')
        self.assertPathPerm(False, 'user', '', '/writeonly')
        self.assertPathPerm(False, 'user', '', '/empty')

    def test_trailing_slashes(self):
        # Combinations of trailing slashes in the file and in the path
        self.assertPathPerm(True, 'user', '', '/trailing_a')
        self.assertPathPerm(True, 'user', '', '/trailing_a/')
        self.assertPathPerm(True, 'user', '', '/trailing_b')
        self.assertPathPerm(True, 'user', '', '/trailing_b/')

    def test_sub_path(self):
        # Permissions are inherited from containing directories
        self.assertPathPerm(True, 'user', '', '/sub/path')
        self.assertPathPerm(True, 'user', '', '/sub/path/test')
        self.assertPathPerm(True, 'user', '', '/sub/path/other/sub')

    def test_module_usage(self):
        # If a module name is specified, the rules are specific to the module
        self.assertPathPerm(True, 'user', 'module', '/module_a')
        self.assertPathPerm(None, 'user', 'module', '/module_b')
        # If a module is specified, but the configuration contains a non-module
        # path, the non-module path can still apply
        self.assertPathPerm(True, 'user', 'module', '/module_c')
        # The module-specific rule takes precedence
        self.assertPathPerm(False, 'user', 'module', '/module_d')

    def test_wildcard(self):
        # The * wildcard matches all users, including anonymous
        self.assertPathPerm(True, 'anonymous', '', '/wildcard')
        self.assertPathPerm(True, 'joe', '', '/wildcard')
        self.assertPathPerm(True, 'jane', '', '/wildcard')

    def test_special_tokens(self):
        # The $anonymous token matches only anonymous users
        self.assertPathPerm(True, 'anonymous', '', '/special/anonymous')
        self.assertPathPerm(None, 'user', '', '/special/anonymous')
        # The $authenticated token matches all authenticated users
        self.assertPathPerm(None, 'anonymous', '', '/special/authenticated')
        self.assertPathPerm(True, 'joe', '', '/special/authenticated')
        self.assertPathPerm(True, 'jane', '', '/special/authenticated')

    def test_groups(self):
        # Groups are specified in a separate section and used with an @ prefix
        self.assertPathPerm(True, 'user', '', '/groups_a')
        # Groups can also be members of other groups
        self.assertPathPerm(True, 'user', '', '/groups_b')
        # Groups should not be defined cyclically, but they are still handled
        # correctly to avoid infinite loops
        self.assertPathPerm(True, 'user', '', '/cyclic')

    def test_precedence(self):
        # Module-specific sections take precedence over non-module sections
        self.assertPathPerm(False, 'user', 'module', '/precedence_a')
        # The most specific section applies
        self.assertPathPerm(True, 'user', '', '/precedence_b/sub/test')
        # ... intentional deviation from SVN's rules as we need to
        # make '/precedence_b/sub' browseable so that the user can see
        # '/precedence_b/sub/test':
        self.assertPathPerm(True, 'user', '', '/precedence_b/sub')
        self.assertPathPerm(True, 'user', '', '/precedence_b')
        # Within a section, the first matching rule applies
        self.assertPathPerm(False, 'user', '', '/precedence_c')
        self.assertPathPerm(True, 'user', '', '/precedence_d')

    def test_aliases(self):
        # Aliases are specified in a separate section and used with an & prefix
        self.assertPathPerm(True, 'Mr Hyde', '', '/aliases_a')
        # Aliases can also be used in groups
        self.assertPathPerm(True, 'Mr Hyde', '', '/aliases_b')

    def test_scoped_repository(self):
        # Take repository scope into account
        self.assertPathPerm(True, 'joe', 'scoped', '/dir1')
        self.assertPathPerm(None, 'joe', 'scoped', '/dir2')
        self.assertPathPerm(True, 'joe', 'scoped', '/')
        self.assertPathPerm(None, 'jane', 'scoped', '/dir1')
        self.assertPathPerm(True, 'jane', 'scoped', '/dir2')
        self.assertPathPerm(True, 'jane', 'scoped', '/')

    def test_changesets(self):
        # Changesets are allowed if at least one changed path is allowed, or
        # if the changeset is empty
        self.assertRevPerm(True, 'joe', 'scoped', 123)
        self.assertRevPerm(None, 'joe', 'scoped', 456)
        self.assertRevPerm(True, 'joe', 'scoped', 789)
        self.assertRevPerm(None, 'jane', 'scoped', 123)
        self.assertRevPerm(True, 'jane', 'scoped', 456)
        self.assertRevPerm(True, 'jane', 'scoped', 789)
        self.assertRevPerm(None, 'user', 'scoped', 123)
        self.assertRevPerm(None, 'user', 'scoped', 456)
        self.assertRevPerm(True, 'user', 'scoped', 789)
def suite():
    """Return the combined test suite of this module."""
    all_tests = unittest.TestSuite()
    for case in (AuthzParserTestCase, AuthzSourcePolicyTestCase):
        all_tests.addTest(unittest.makeSuite(case, 'test'))
    return all_tests
if __name__ == '__main__':
    # Run the module's test suite when executed directly.
    unittest.TextTestRunner().run(suite())
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/versioncontrol/tests/api.py | trac/trac/versioncontrol/tests/api.py | # -*- coding: utf-8 -*-
#
# Copyright (C) 2007 CommProve, Inc. <eli.carter@commprove.com>
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://trac.edgewall.org/wiki/TracLicense.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://trac.edgewall.org/log/.
#
# Author: Eli Carter <eli.carter@commprove.com>
import unittest
from trac.resource import Resource, get_resource_description, get_resource_url
from trac.test import EnvironmentStub
from trac.versioncontrol.api import Repository
class ApiTestCase(unittest.TestCase):
    """Check that the abstract `Repository` base class raises
    `NotImplementedError` from every method a backend must implement.
    """

    def setUp(self):
        self.repo_base = Repository('testrepo', {'name': 'testrepo', 'id': 1},
                                    None)

    # NOTE: the deprecated `failUnlessRaises` alias (deprecated since
    # Python 2.7, removed in 3.12) was replaced by `assertRaises`.

    def test_raise_NotImplementedError_close(self):
        self.assertRaises(NotImplementedError, self.repo_base.close)

    def test_raise_NotImplementedError_get_changeset(self):
        self.assertRaises(NotImplementedError, self.repo_base.get_changeset, 1)

    def test_raise_NotImplementedError_get_node(self):
        self.assertRaises(NotImplementedError, self.repo_base.get_node, 'path')

    def test_raise_NotImplementedError_get_oldest_rev(self):
        self.assertRaises(NotImplementedError, self.repo_base.get_oldest_rev)

    def test_raise_NotImplementedError_get_youngest_rev(self):
        self.assertRaises(NotImplementedError, self.repo_base.get_youngest_rev)

    def test_raise_NotImplementedError_previous_rev(self):
        self.assertRaises(NotImplementedError, self.repo_base.previous_rev, 1)

    def test_raise_NotImplementedError_next_rev(self):
        self.assertRaises(NotImplementedError, self.repo_base.next_rev, 1)

    def test_raise_NotImplementedError_rev_older_than(self):
        self.assertRaises(NotImplementedError, self.repo_base.rev_older_than,
                          1, 2)

    def test_raise_NotImplementedError_get_path_history(self):
        self.assertRaises(NotImplementedError,
                          self.repo_base.get_path_history, 'path')

    def test_raise_NotImplementedError_normalize_path(self):
        self.assertRaises(NotImplementedError, self.repo_base.normalize_path,
                          'path')

    def test_raise_NotImplementedError_normalize_rev(self):
        self.assertRaises(NotImplementedError, self.repo_base.normalize_rev, 1)

    def test_raise_NotImplementedError_get_changes(self):
        self.assertRaises(NotImplementedError, self.repo_base.get_changes,
                          'path', 1, 'path', 2)
class ResourceManagerTestCase(unittest.TestCase):
    """Tests for the resource descriptions and URLs of the version control
    resource types (changeset, source, repository).
    """

    def setUp(self):
        self.env = EnvironmentStub(default_data=True)

    def test_resource_changeset(self):
        """Changeset resources render with and without a parent repository."""
        res = Resource('changeset', '42')
        self.assertEqual('Changeset 42', get_resource_description(self.env, res))
        self.assertEqual('/trac.cgi/changeset/42',
                         get_resource_url(self.env, res, self.env.href))
        repo = Resource('repository', 'repo')
        res = Resource('changeset', '42', parent=repo)
        self.assertEqual('Changeset 42 in repo',
                         get_resource_description(self.env, res))
        self.assertEqual('/trac.cgi/changeset/42/repo',
                         get_resource_url(self.env, res, self.env.href))

    def test_resource_source(self):
        """Source resources render with optional repository and version."""
        res = Resource('source', '/trunk/src')
        self.assertEqual('path /trunk/src',
                         get_resource_description(self.env, res))
        self.assertEqual('/trac.cgi/browser/trunk/src',
                         get_resource_url(self.env, res, self.env.href))
        repo = Resource('repository', 'repo')
        res = Resource('source', '/trunk/src', parent=repo)
        self.assertEqual('path /trunk/src in repo',
                         get_resource_description(self.env, res))
        self.assertEqual('/trac.cgi/browser/repo/trunk/src',
                         get_resource_url(self.env, res, self.env.href))
        repo = Resource('repository', 'repo')
        res = Resource('source', '/trunk/src', version=42, parent=repo)
        self.assertEqual('path /trunk/src@42 in repo',
                         get_resource_description(self.env, res))
        self.assertEqual('/trac.cgi/browser/repo/trunk/src?rev=42',
                         get_resource_url(self.env, res, self.env.href))

    def test_resource_repository(self):
        """Repository resources map to their browser URL."""
        res = Resource('repository', 'testrepo')
        self.assertEqual('Repository testrepo',
                         get_resource_description(self.env, res))
        self.assertEqual('/trac.cgi/browser/testrepo',
                         get_resource_url(self.env, res, self.env.href))
def suite():
    """Return the combined test suite of this module."""
    tests = unittest.TestSuite()
    for case in (ApiTestCase, ResourceManagerTestCase):
        tests.addTest(unittest.makeSuite(case, 'test'))
    return tests
if __name__ == '__main__':
    # Allow running this test module directly.
    unittest.main()
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/versioncontrol/tests/diff.py | trac/trac/versioncontrol/tests/diff.py | from trac.versioncontrol import diff
import unittest
def get_opcodes(*args, **kwargs):
    """Yield every opcode of every filtered hunk, flattened into a single
    stream.
    """
    hunks = diff.get_filtered_hunks(*args, **kwargs)
    for group in hunks:
        for op in group:
            yield op
class DiffTestCase(unittest.TestCase):
    def testget_change_extent(self):
        """`get_change_extent` returns the extent of the common prefix and
        suffix of two strings as an (offset-from-start, offset-from-end)
        pair.
        """
        self.assertEqual((3, 0), diff.get_change_extent('xxx', 'xxx'))
        self.assertEqual((0, 0), diff.get_change_extent('', 'xxx'))
        self.assertEqual((0, 0), diff.get_change_extent('xxx', ''))
        self.assertEqual((0, 0), diff.get_change_extent('xxx', 'yyy'))
        self.assertEqual((1, -1), diff.get_change_extent('xxx', 'xyx'))
        self.assertEqual((1, -1), diff.get_change_extent('xxx', 'xyyyx'))
        self.assertEqual((1, 0), diff.get_change_extent('xy', 'xzz'))
        self.assertEqual((1, -1), diff.get_change_extent('xyx', 'xzzx'))
        self.assertEqual((1, -1), diff.get_change_extent('xzzx', 'xyx'))
    def test_insert_blank_line(self):
        """Pure blank-line insertions are folded into the preceding 'equal'
        opcode when `ignore_blank_lines` is set; insertions that also add
        content are always reported.
        """
        opcodes = get_opcodes(['A', 'B'], ['A', 'B', ''], ignore_blank_lines=0)
        self.assertEqual(('equal', 0, 2, 0, 2), opcodes.next())
        self.assertEqual(('insert', 2, 2, 2, 3), opcodes.next())
        self.assertRaises(StopIteration, opcodes.next)
        opcodes = get_opcodes(['A', 'B'], ['A', 'B', ''], ignore_blank_lines=1)
        self.assertEqual(('equal', 0, 2, 0, 3), opcodes.next())
        self.assertRaises(StopIteration, opcodes.next)
        opcodes = get_opcodes(['A'], ['A', 'B', ''], ignore_blank_lines=0)
        self.assertEqual(('equal', 0, 1, 0, 1), opcodes.next())
        self.assertEqual(('insert', 1, 1, 1, 3), opcodes.next())
        self.assertRaises(StopIteration, opcodes.next)
        opcodes = get_opcodes(['A'], ['A', 'B', ''], ignore_blank_lines=1)
        self.assertEqual(('equal', 0, 1, 0, 1), opcodes.next())
        self.assertEqual(('insert', 1, 1, 1, 3), opcodes.next())
        self.assertRaises(StopIteration, opcodes.next)
    def test_delete_blank_line(self):
        """Pure blank-line deletions are folded into the preceding 'equal'
        opcode when `ignore_blank_lines` is set; deletions that also remove
        content are always reported.
        """
        opcodes = get_opcodes(['A', 'B', ''], ['A', 'B'], ignore_blank_lines=0)
        self.assertEqual(('equal', 0, 2, 0, 2), opcodes.next())
        self.assertEqual(('delete', 2, 3, 2, 2), opcodes.next())
        self.assertRaises(StopIteration, opcodes.next)
        opcodes = get_opcodes(['A', 'B', ''], ['A', 'B'], ignore_blank_lines=1)
        self.assertEqual(('equal', 0, 3, 0, 2), opcodes.next())
        self.assertRaises(StopIteration, opcodes.next)
        opcodes = get_opcodes(['A', 'B', ''], ['A'], ignore_blank_lines=0)
        self.assertEqual(('equal', 0, 1, 0, 1), opcodes.next())
        self.assertEqual(('delete', 1, 3, 1, 1), opcodes.next())
        self.assertRaises(StopIteration, opcodes.next)
        opcodes = get_opcodes(['A', 'B', ''], ['A'], ignore_blank_lines=1)
        self.assertEqual(('equal', 0, 1, 0, 1), opcodes.next())
        self.assertEqual(('delete', 1, 3, 1, 1), opcodes.next())
        self.assertRaises(StopIteration, opcodes.next)
def test_space_changes(self):
opcodes = get_opcodes(['A', 'B b'], ['A', 'B b'],
ignore_space_changes=0)
self.assertEqual(('equal', 0, 1, 0, 1), opcodes.next())
self.assertEqual(('replace', 1, 2, 1, 2), opcodes.next())
self.assertRaises(StopIteration, opcodes.next)
opcodes = get_opcodes(['A', 'B b'], ['A', 'B b'],
ignore_space_changes=1)
self.assertEqual(('equal', 0, 2, 0, 2), opcodes.next())
self.assertRaises(StopIteration, opcodes.next)
def test_case_changes(self):
opcodes = get_opcodes(['A', 'B b'], ['A', 'B B'], ignore_case=0)
self.assertEqual(('equal', 0, 1, 0, 1), opcodes.next())
self.assertEqual(('replace', 1, 2, 1, 2), opcodes.next())
self.assertRaises(StopIteration, opcodes.next)
opcodes = get_opcodes(['A', 'B b'], ['A', 'B B'], ignore_case=1)
self.assertEqual(('equal', 0, 2, 0, 2), opcodes.next())
self.assertRaises(StopIteration, opcodes.next)
def test_space_and_case_changes(self):
opcodes = get_opcodes(['A', 'B b'], ['A', 'B B'],
ignore_case=0, ignore_space_changes=0)
self.assertEqual(('equal', 0, 1, 0, 1), opcodes.next())
self.assertEqual(('replace', 1, 2, 1, 2), opcodes.next())
self.assertRaises(StopIteration, opcodes.next)
opcodes = get_opcodes(['A', 'B b'], ['A', 'B B'],
ignore_case=1, ignore_space_changes=1)
self.assertEqual(('equal', 0, 2, 0, 2), opcodes.next())
self.assertRaises(StopIteration, opcodes.next)
def test_grouped_opcodes_context1(self):
groups = diff.get_filtered_hunks(
['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H'],
['A', 'B', 'C', 'd', 'e', 'f', 'G', 'H'], context=1)
group = groups.next()
self.assertRaises(StopIteration, groups.next)
self.assertEqual(('equal', 2, 3, 2, 3), group[0])
self.assertEqual(('replace', 3, 6, 3, 6), group[1])
self.assertEqual(('equal', 6, 7, 6, 7), group[2])
def test_grouped_opcodes_context1_ignorecase(self):
old = ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H']
new = ['X', 'B', 'C', 'd', 'e', 'f', 'G', 'Y']
groups = diff.get_filtered_hunks(old, new, context=1, ignore_case=1)
group = groups.next()
self.assertEqual([('replace', 0, 1, 0, 1), ('equal', 1, 2, 1, 2)],
group)
group = groups.next()
self.assertRaises(StopIteration, groups.next)
self.assertEqual([('equal', 6, 7, 6, 7), ('replace', 7, 8, 7, 8)],
group)
def test_grouped_opcodes_full_context(self):
old = ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H']
new = ['X', 'B', 'C', 'd', 'e', 'f', 'G', 'Y']
groups = diff.get_filtered_hunks(old, new, context=None)
group = groups.next()
self.assertRaises(StopIteration, groups.next)
self.assertEqual([
('replace', 0, 1, 0, 1),
('equal', 1, 3, 1, 3),
('replace', 3, 6, 3, 6),
('equal', 6, 7, 6, 7),
('replace', 7, 8, 7, 8),
], group)
groups = diff.get_filtered_hunks(old, new, context=None, ignore_case=1)
group = groups.next()
self.assertRaises(StopIteration, groups.next)
self.assertEqual([
('replace', 0, 1, 0, 1),
('equal', 1, 7, 1, 7),
('replace', 7, 8, 7, 8),
], group)
def test_grouped_opcodes_insert_blank_line_at_top(self):
"""
Regression test for #2090. Make sure that the equal block following an
insert at the top of a file is correct.
"""
groups = diff.get_filtered_hunks(['B', 'C', 'D', 'E', 'F', 'G'],
['A', 'B', 'C', 'D', 'E', 'F', 'G'],
context=3)
self.assertEqual([('insert', 0, 0, 0, 1), ('equal', 0, 3, 1, 4)],
groups.next())
self.assertRaises(StopIteration, groups.next)
def test_unified_diff_no_context(self):
diff_lines = list(diff.unified_diff(['a'], ['b']))
self.assertEqual(['@@ -1,1 +1,1 @@', '-a', '+b'], diff_lines)
def test_quotes_not_marked_up(self):
"""Make sure that the escape calls leave quotes along, we don't need
to escape them."""
changes = diff.diff_blocks(['ab'], ['a"b'])
self.assertEquals(len(changes), 1)
blocks = changes[0]
self.assertEquals(len(blocks), 1)
block = blocks[0]
self.assertEquals(block['type'], 'mod')
self.assertEquals(str(block['base']['lines'][0]), 'a<del></del>b')
self.assertEquals(str(block['changed']['lines'][0]), 'a<ins>"</ins>b')
def test_whitespace_marked_up1(self):
"""Regression test for #5795"""
changes = diff.diff_blocks(['*a'], [' *a'])
block = changes[0][0]
self.assertEquals(block['type'], 'mod')
self.assertEquals(str(block['base']['lines'][0]), '<del></del>*a')
self.assertEquals(str(block['changed']['lines'][0]),
'<ins> </ins>*a')
def test_whitespace_marked_up2(self):
"""Related to #5795"""
changes = diff.diff_blocks([' a'], [' b'])
block = changes[0][0]
self.assertEquals(block['type'], 'mod')
self.assertEquals(str(block['base']['lines'][0]),
' <del>a</del>')
self.assertEquals(str(block['changed']['lines'][0]),
' <ins>b</ins>')
def test_whitespace_marked_up3(self):
"""Related to #5795"""
changes = diff.diff_blocks(['a '], ['b '])
block = changes[0][0]
self.assertEquals(block['type'], 'mod')
self.assertEquals(str(block['base']['lines'][0]),
'<del>a</del> ')
self.assertEquals(str(block['changed']['lines'][0]),
'<ins>b</ins> ')
def test_expandtabs_works_right(self):
"""Regression test for #4557"""
changes = diff.diff_blocks(['aa\tb'], ['aaxb'])
block = changes[0][0]
self.assertEquals(block['type'], 'mod')
self.assertEquals(str(block['base']['lines'][0]),
'aa<del> </del>b')
self.assertEquals(str(block['changed']['lines'][0]),
'aa<ins>x</ins>b')
def suite():
return unittest.makeSuite(DiffTestCase, 'test')
if __name__ == '__main__':
unittest.main()
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/versioncontrol/tests/functional.py | trac/trac/versioncontrol/tests/functional.py | #!/usr/bin/python
from trac.tests.functional import *
class TestEmptySvnRepo(FunctionalTwillTestCaseSetup):
def runTest(self):
"""Check empty repository"""
browser_url = self._tester.url + '/browser'
tc.go(browser_url)
tc.url(browser_url)
# This tests the current behavior; I'm not sure it's the best
# behavior.
tc.follow('Last Change')
tc.find('Error: No such changeset')
tc.back()
tc.follow('Revision Log')
tc.notfind('Error: Nonexistent path')
class TestRepoCreation(FunctionalTwillTestCaseSetup):
def runTest(self):
"""Create a directory tree in the repository"""
# This should probably use the svn bindings...
directories = []
for component in ('component1', 'component2'):
directories.append(component)
for subdir in ('branches', 'tags', 'trunk'):
directories.append('/'.join([component, subdir]))
commit_message = 'Create component trees.'
self._testenv.svn_mkdir(directories, commit_message)
browser_url = self._tester.url + '/browser'
tc.go(browser_url)
tc.url(browser_url)
tc.find('component1')
tc.find('component2')
tc.follow('Last Change')
tc.url(self._tester.url + '/changeset/1/')
tc.find(commit_message)
for directory in directories:
tc.find(directory)
tc.back()
tc.follow('Revision Log')
# (Note that our commit log message is short enough to avoid
# truncation.)
tc.find(commit_message)
tc.follow('Timeline')
# (Note that our commit log message is short enough to avoid
# truncation.)
tc.find(commit_message)
tc.formvalue('prefs', 'ticket', False)
tc.formvalue('prefs', 'milestone', False)
tc.formvalue('prefs', 'wiki', False)
tc.submit()
tc.find('by.*admin')
# (Note that our commit log message is short enough to avoid
# truncation.)
tc.find(commit_message)
class TestRepoBrowse(FunctionalTwillTestCaseSetup):
# TODO: move this out to a subversion-specific testing module
def runTest(self):
"""Add a file to the repository and verify it is in the browser"""
# Add a file to Subversion
tempfilename = random_word()
fulltempfilename = 'component1/trunk/' + tempfilename
revision = self._testenv.svn_add(fulltempfilename, random_page())
# Verify that it appears in the browser view:
browser_url = self._tester.url + '/browser'
tc.go(browser_url)
tc.url(browser_url)
tc.find('component1')
tc.follow('component1')
tc.follow('trunk')
tc.follow(tempfilename)
self._tester.quickjump('[%s]' % revision)
tc.find('Changeset %s' % revision)
tc.find('admin')
tc.find('Add %s' % fulltempfilename)
tc.find('1 added')
tc.follow('Timeline')
tc.find('Add %s' % fulltempfilename)
class TestNewFileLog(FunctionalTwillTestCaseSetup):
# TODO: move this out to a subversion-specific testing module
def runTest(self):
"""Verify browser log for a new file"""
tempfilename = random_word() + '_new.txt'
fulltempfilename = 'component1/trunk/' + tempfilename
revision = self._testenv.svn_add(fulltempfilename, '')
tc.go(self._tester.url + '/log/' + fulltempfilename)
tc.find('@%d' % revision)
tc.find('Add %s' % fulltempfilename)
class RegressionTestTicket5819(FunctionalTwillTestCaseSetup):
def runTest(self):
"""Test for regression of http://trac.edgewall.org/ticket/5819
Events with identical dates are reversed in timeline
"""
# Multiple events very close together
files = ['a', 'b', 'c', 'd']
for filename in files:
# We do a mkdir because it's easy.
self._testenv.svn_mkdir(['component1/trunk/' + filename],
'Create component1/%s' % filename)
self._tester.go_to_timeline()
# They are supposed to show up in d, c, b, a order.
components = '.*'.join(['Create component1/%s' % f for f in
reversed(files)])
tc.find(components, 's')
class RegressionTestRev5877(FunctionalTwillTestCaseSetup):
def runTest(self):
"""Test for regression of the source browser fix in r5877"""
tc.go(self._tester.url + '/browser?range_min_secs=1')
tc.notfind(internal_error)
def functionalSuite(suite=None):
if not suite:
import trac.tests.functional.testcases
suite = trac.tests.functional.testcases.functionalSuite()
if has_svn:
suite.addTest(TestEmptySvnRepo())
suite.addTest(TestRepoCreation())
suite.addTest(TestRepoBrowse())
suite.addTest(TestNewFileLog())
suite.addTest(RegressionTestTicket5819())
suite.addTest(RegressionTestRev5877())
else:
print "SKIP: versioncontrol/tests/functional.py (no svn bindings)"
return suite
if __name__ == '__main__':
unittest.main(defaultTest='functionalSuite')
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/versioncontrol/tests/__init__.py | trac/trac/versioncontrol/tests/__init__.py | import unittest
from trac.versioncontrol.tests import cache, diff, svn_authz, api
from trac.versioncontrol.tests.functional import functionalSuite
def suite():
suite = unittest.TestSuite()
suite.addTest(cache.suite())
suite.addTest(diff.suite())
suite.addTest(svn_authz.suite())
suite.addTest(api.suite())
return suite
if __name__ == '__main__':
unittest.main(defaultTest='suite')
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/versioncontrol/tests/cache.py | trac/trac/versioncontrol/tests/cache.py | # -*- coding: utf-8 -*-
#
# Copyright (C)2005-2009 Edgewall Software
# Copyright (C) 2005 Christopher Lenz <cmlenz@gmx.de>
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://trac.edgewall.org/wiki/TracLicense.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://trac.edgewall.org/log/.
#
# Author: Christopher Lenz <cmlenz@gmx.de>
from __future__ import with_statement
from datetime import datetime
from trac.test import EnvironmentStub, Mock
from trac.util.datefmt import to_utimestamp, utc
from trac.versioncontrol import Repository, Changeset, Node, NoSuchChangeset
from trac.versioncontrol.cache import CachedRepository
import unittest
class CacheTestCase(unittest.TestCase):
def setUp(self):
self.env = EnvironmentStub()
self.log = self.env.log
self.env.db_transaction.executemany(
"INSERT INTO repository (id, name, value) VALUES (%s, %s, %s)",
[(1, 'name', 'test-repos'),
(1, 'youngest_rev', '')])
def tearDown(self):
self.env.reset_db()
# Helpers
def get_repos(self, get_changeset=None, youngest_rev=1):
if get_changeset is None:
def no_changeset(rev):
raise NoSuchChangeset(rev)
get_changeset = no_changeset
return Mock(Repository, 'test-repos', {'name': 'test-repos', 'id': 1},
self.log,
get_changeset=get_changeset,
get_oldest_rev=lambda: 0,
get_youngest_rev=lambda: youngest_rev,
normalize_rev=lambda x: get_changeset(x).rev,
next_rev=(lambda x: int(x) < youngest_rev and x + 1 \
or None))
def preset_cache(self, *args):
"""Each arg is a (rev tuple, changes list of tuples) pair."""
with self.env.db_transaction as db:
for rev, changes in args:
db("""INSERT INTO revision (repos, rev, time, author, message)
VALUES (1,%s,%s,%s,%s)
""", rev)
if changes:
db.executemany("""
INSERT INTO node_change (repos, rev, path, node_type,
change_type, base_path,
base_rev)
VALUES (1, %s, %s, %s, %s, %s, %s)
""", [(rev[0],) + change for change in changes])
db("""UPDATE repository SET value=%s
WHERE id=1 AND name='youngest_rev'
""", (args[-1][0][0],))
# Tests
def test_initial_sync_with_empty_repos(self):
repos = self.get_repos()
cache = CachedRepository(self.env, repos, self.log)
cache.sync()
with self.env.db_query as db:
self.assertEquals([], db(
"SELECT rev, time, author, message FROM revision"))
self.assertEquals(0, db("SELECT COUNT(*) FROM node_change")[0][0])
def test_initial_sync(self):
t1 = datetime(2001, 1, 1, 1, 1, 1, 0, utc)
t2 = datetime(2002, 1, 1, 1, 1, 1, 0, utc)
repos = self.get_repos(get_changeset=lambda x: changesets[int(x)],
youngest_rev=1)
changes = [('trunk', Node.DIRECTORY, Changeset.ADD, None, None),
('trunk/README', Node.FILE, Changeset.ADD, None, None)]
changesets = [Mock(Changeset, repos, 0, '', '', t1,
get_changes=lambda: []),
Mock(Changeset, repos, 1, 'Import', 'joe', t2,
get_changes=lambda: iter(changes))]
cache = CachedRepository(self.env, repos, self.log)
cache.sync()
with self.env.db_query as db:
rows = db("SELECT rev, time, author, message FROM revision")
self.assertEquals(len(rows), 2)
self.assertEquals(('0', to_utimestamp(t1), '', ''), rows[0])
self.assertEquals(('1', to_utimestamp(t2), 'joe', 'Import'),
rows[1])
rows = db("""
SELECT rev, path, node_type, change_type, base_path, base_rev
FROM node_change""")
self.assertEquals(len(rows), 2)
self.assertEquals(('1', 'trunk', 'D', 'A', None, None), rows[0])
self.assertEquals(('1', 'trunk/README', 'F', 'A', None, None),
rows[1])
def test_update_sync(self):
t1 = datetime(2001, 1, 1, 1, 1, 1, 0, utc)
t2 = datetime(2002, 1, 1, 1, 1, 1, 0, utc)
t3 = datetime(2003, 1, 1, 1, 1, 1, 0, utc)
self.preset_cache(
(('0', to_utimestamp(t1), '', ''), []),
(('1', to_utimestamp(t2), 'joe', 'Import'),
[('trunk', 'D', 'A', None, None),
('trunk/README', 'F', 'A', None, None)]),
)
repos = self.get_repos(get_changeset=lambda x: changesets[int(x)],
youngest_rev=2)
changes = [('trunk/README', Node.FILE, Changeset.EDIT, 'trunk/README',
1)]
changesets = [
None,
Mock(Changeset, repos, 1, '', '', t2, get_changes=lambda: []),
Mock(Changeset, repos, 2, 'Update', 'joe', t3,
get_changes=lambda: iter(changes))
]
cache = CachedRepository(self.env, repos, self.log)
cache.sync()
with self.env.db_query as db:
self.assertEquals([(to_utimestamp(t3), 'joe', 'Update')],
db("SELECT time, author, message FROM revision WHERE rev='2'"))
self.assertEquals([('trunk/README', 'F', 'E', 'trunk/README',
'1')],
db("""SELECT path, node_type, change_type, base_path,
base_rev
FROM node_change WHERE rev='2'"""))
def test_clean_sync(self):
t1 = datetime(2001, 1, 1, 1, 1, 1, 0, utc)
t2 = datetime(2002, 1, 1, 1, 1, 1, 0, utc)
t3 = datetime(2003, 1, 1, 1, 1, 1, 0, utc)
self.preset_cache(
(('0', to_utimestamp(t1), '', ''), []),
(('1', to_utimestamp(t2), 'joe', 'Import'),
[('trunk', 'D', 'A', None, None),
('trunk/README', 'F', 'A', None, None)]),
)
repos = self.get_repos(get_changeset=lambda x: changesets[int(x)],
youngest_rev=2)
changes1 = [('trunk', Node.DIRECTORY, Changeset.ADD, None, None),
('trunk/README', Node.FILE, Changeset.ADD, None, None)]
changes2 = [('trunk/README', Node.FILE, Changeset.EDIT, 'trunk/README',
1)]
changesets = [
Mock(Changeset, repos, 0, '**empty**', 'joe', t1,
get_changes=lambda: []),
Mock(Changeset, repos, 1, 'Initial Import', 'joe', t2,
get_changes=lambda: iter(changes1)),
Mock(Changeset, repos, 2, 'Update', 'joe', t3,
get_changes=lambda: iter(changes2))
]
cache = CachedRepository(self.env, repos, self.log)
cache.sync(clean=True)
rows = self.env.db_query("""
SELECT time, author, message FROM revision ORDER BY rev
""")
self.assertEquals(3, len(rows))
self.assertEquals((to_utimestamp(t1), 'joe', '**empty**'), rows[0])
self.assertEquals((to_utimestamp(t2), 'joe', 'Initial Import'),
rows[1])
self.assertEquals((to_utimestamp(t3), 'joe', 'Update'), rows[2])
rows = self.env.db_query("""
SELECT rev, path, node_type, change_type, base_path, base_rev
FROM node_change ORDER BY rev, path""")
self.assertEquals(3, len(rows))
self.assertEquals(('1', 'trunk', 'D', 'A', None, None), rows[0])
self.assertEquals(('1', 'trunk/README', 'F', 'A', None, None), rows[1])
self.assertEquals(('2', 'trunk/README', 'F', 'E', 'trunk/README', '1'),
rows[2])
def test_sync_changeset(self):
t1 = datetime(2001, 1, 1, 1, 1, 1, 0, utc)
t2 = datetime(2002, 1, 1, 1, 1, 1, 0, utc)
self.preset_cache(
(('0', to_utimestamp(t1), '', ''), []),
(('1', to_utimestamp(t2), 'joe', 'Import'),
[('trunk', 'D', 'A', None, None),
('trunk/README', 'F', 'A', None, None)]),
)
repos = self.get_repos(get_changeset=lambda x: changesets[int(x)],
youngest_rev=1)
changes1 = [('trunk', Node.DIRECTORY, Changeset.ADD, None, None),
('trunk/README', Node.FILE, Changeset.ADD, None, None)]
changesets = [
Mock(Changeset, repos, 0, '**empty**', 'joe', t1,
get_changes=lambda: []),
Mock(Changeset, repos, 1, 'Initial Import', 'joe', t2,
get_changes=lambda: iter(changes1)),
]
cache = CachedRepository(self.env, repos, self.log)
cache.sync_changeset(0)
rows = self.env.db_query(
"SELECT time, author, message FROM revision ORDER BY rev")
self.assertEquals(2, len(rows))
self.assertEquals((to_utimestamp(t1), 'joe', '**empty**'), rows[0])
self.assertEquals((to_utimestamp(t2), 'joe', 'Import'), rows[1])
def test_sync_changeset_if_not_exists(self):
t = [
datetime(2001, 1, 1, 1, 1, 1, 0, utc), # r0
datetime(2002, 1, 1, 1, 1, 1, 0, utc), # r1
datetime(2003, 1, 1, 1, 1, 1, 0, utc), # r2
datetime(2004, 1, 1, 1, 1, 1, 0, utc), # r3
]
self.preset_cache(
(('0', to_utimestamp(t[0]), 'joe', '**empty**'), []),
(('1', to_utimestamp(t[1]), 'joe', 'Import'),
[('trunk', 'D', 'A', None, None),
('trunk/README', 'F', 'A', None, None)]),
# not exists r2
(('3', to_utimestamp(t[3]), 'joe', 'Add COPYING'),
[('trunk/COPYING', 'F', 'A', None, None)]),
)
repos = self.get_repos(get_changeset=lambda x: changesets[int(x)],
youngest_rev=3)
changes = [
None, # r0
[('trunk', Node.DIRECTORY, Changeset.ADD, None, None), # r1
('trunk/README', Node.FILE, Changeset.ADD, None, None)],
[('branches', Node.DIRECTORY, Changeset.ADD, None, None), # r2
('tags', Node.DIRECTORY, Changeset.ADD, None, None)],
[('trunk/COPYING', Node.FILE, Changeset.ADD, None, None)], # r3
]
changesets = [
Mock(Changeset, repos, 0, '**empty**', 'joe', t[0],
get_changes=lambda: []),
Mock(Changeset, repos, 1, 'Initial Import', 'joe', t[1],
get_changes=lambda: iter(changes[1])),
Mock(Changeset, repos, 2, 'Created directories', 'john', t[2],
get_changes=lambda: iter(changes[2])),
Mock(Changeset, repos, 3, 'Add COPYING', 'joe', t[3],
get_changes=lambda: iter(changes[3])),
]
cache = CachedRepository(self.env, repos, self.log)
self.assertRaises(NoSuchChangeset, cache.get_changeset, 2)
cache.sync()
self.assertRaises(NoSuchChangeset, cache.get_changeset, 2)
self.assertEqual(None, cache.sync_changeset(2))
cset = cache.get_changeset(2)
self.assertEqual('john', cset.author)
self.assertEqual('Created directories', cset.message)
self.assertEqual(t[2], cset.date)
cset_changes = cset.get_changes()
self.assertEqual(('branches', Node.DIRECTORY, Changeset.ADD, None,
None),
cset_changes.next())
self.assertEqual(('tags', Node.DIRECTORY, Changeset.ADD, None, None),
cset_changes.next())
self.assertRaises(StopIteration, cset_changes.next)
rows = self.env.db_query(
"SELECT time,author,message FROM revision ORDER BY rev")
self.assertEquals(4, len(rows))
self.assertEquals((to_utimestamp(t[0]), 'joe', '**empty**'), rows[0])
self.assertEquals((to_utimestamp(t[1]), 'joe', 'Import'), rows[1])
self.assertEquals((to_utimestamp(t[2]), 'john', 'Created directories'),
rows[2])
self.assertEquals((to_utimestamp(t[3]), 'joe', 'Add COPYING'), rows[3])
def test_get_changes(self):
t1 = datetime(2001, 1, 1, 1, 1, 1, 0, utc)
t2 = datetime(2002, 1, 1, 1, 1, 1, 0, utc)
self.preset_cache(
(('0', to_utimestamp(t1), '', ''), []),
(('1', to_utimestamp(t2), 'joe', 'Import'),
[('trunk', 'D', 'A', None, None),
('trunk/RDME', 'F', 'A', None, None)]),
)
repos = self.get_repos()
cache = CachedRepository(self.env, repos, self.log)
self.assertEqual('1', cache.youngest_rev)
changeset = cache.get_changeset(1)
self.assertEqual('joe', changeset.author)
self.assertEqual('Import', changeset.message)
self.assertEqual(t2, changeset.date)
changes = changeset.get_changes()
self.assertEqual(('trunk', Node.DIRECTORY, Changeset.ADD, None, None),
changes.next())
self.assertEqual(('trunk/RDME', Node.FILE, Changeset.ADD, None, None),
changes.next())
self.assertRaises(StopIteration, changes.next)
def suite():
return unittest.makeSuite(CacheTestCase, 'test')
if __name__ == '__main__':
unittest.main()
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/versioncontrol/web_ui/util.py | trac/trac/versioncontrol/web_ui/util.py | # -*- coding: utf-8 -*-
#
# Copyright (C) 2003-2009 Edgewall Software
# Copyright (C) 2003-2005 Jonas Borgström <jonas@edgewall.com>
# Copyright (C) 2005-2007 Christian Boos <cboos@edgewall.org>
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://trac.edgewall.org/wiki/TracLicense.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://trac.edgewall.org/log/.
#
# Author: Jonas Borgström <jonas@edgewall.com>
# Christian Boos <cboos@edgewall.org>
from itertools import izip
from genshi.builder import tag
from trac.resource import ResourceNotFound
from trac.util.datefmt import datetime, utc
from trac.util.translation import tag_, _
from trac.versioncontrol.api import Changeset, NoSuchNode, NoSuchChangeset
__all__ = ['get_changes', 'get_path_links', 'get_existing_node',
'get_allowed_node', 'make_log_graph']
def get_changes(repos, revs, log=None):
changes = {}
for rev in revs:
if rev in changes:
continue
try:
changeset = repos.get_changeset(rev)
except NoSuchChangeset:
changeset = Changeset(repos, rev, '', '',
datetime(1970, 1, 1, tzinfo=utc))
if log is not None:
log.warning("Unable to get changeset [%s]", rev)
changes[rev] = changeset
return changes
def get_path_links(href, reponame, path, rev, order=None, desc=None):
desc = desc or None
links = [{'name': 'source:',
'href': href.browser(rev=rev if reponame == '' else None,
order=order, desc=desc)}]
if reponame:
links.append({
'name': reponame,
'href': href.browser(reponame, rev=rev, order=order, desc=desc)})
partial_path = ''
for part in [p for p in path.split('/') if p]:
partial_path += part + '/'
links.append({
'name': part,
'href': href.browser(reponame or None, partial_path, rev=rev,
order=order, desc=desc)
})
return links
def get_existing_node(req, repos, path, rev):
try:
return repos.get_node(path, rev)
except NoSuchNode, e:
# TRANSLATOR: You can 'search' in the repository history... (link)
search_a = tag.a(_("search"),
href=req.href.log(repos.reponame or None, path,
rev=rev, mode='path_history'))
raise ResourceNotFound(tag(
tag.p(e.message, class_="message"),
tag.p(tag_("You can %(search)s in the repository history to see "
"if that path existed but was later removed",
search=search_a))))
def get_allowed_node(repos, path, rev, perm):
if repos is not None:
try:
node = repos.get_node(path, rev)
except (NoSuchNode, NoSuchChangeset):
return None
if node.is_viewable(perm):
return node
def make_log_graph(repos, revs):
"""Generate graph information for the given revisions.
Returns a tuple `(threads, vertices, columns)`, where:
* `threads`: List of paint command lists `[(type, column, line)]`, where
`type` is either 0 for "move to" or 1 for "line to", and `column` and
`line` are coordinates.
* `vertices`: List of `(column, thread_index)` tuples, where the `i`th
item specifies the column in which to draw the dot in line `i` and the
corresponding thread.
* `columns`: Maximum width of the graph.
"""
threads = []
vertices = []
columns = 0
revs = iter(revs)
def add_edge(thread, column, line):
if thread and thread[-1][:2] == [1, column] \
and thread[-2][1] == column:
thread[-1][2] = line
else:
thread.append([1, column, line])
try:
next_rev = revs.next()
line = 0
active = []
active_thread = []
while True:
rev = next_rev
if rev not in active:
# Insert new head
threads.append([[0, len(active), line]])
active_thread.append(threads[-1])
active.append(rev)
columns = max(columns, len(active))
column = active.index(rev)
vertices.append((column, threads.index(active_thread[column])))
next_rev = revs.next() # Raises StopIteration when no more revs
next = active[:]
parents = list(repos.parent_revs(rev))
# Replace current item with parents not already present
new_parents = [p for p in parents if p not in active]
next[column : column + 1] = new_parents
# Add edges to parents
for col, (r, thread) in enumerate(izip(active, active_thread)):
if r in next:
add_edge(thread, next.index(r), line + 1)
elif r == rev:
if new_parents:
parents.remove(new_parents[0])
parents.append(new_parents[0])
for parent in parents:
if parent != parents[0]:
thread.append([0, col, line])
add_edge(thread, next.index(parent), line + 1)
if not new_parents:
del active_thread[column]
else:
base = len(threads)
threads.extend([[0, column + 1 + i, line + 1]]
for i in xrange(len(new_parents) - 1))
active_thread[column + 1 : column + 1] = threads[base:]
active = next
line += 1
except StopIteration:
pass
return threads, vertices, columns
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/versioncontrol/web_ui/log.py | trac/trac/versioncontrol/web_ui/log.py | # -*- coding: utf-8 -*-
#
# Copyright (C) 2003-2009 Edgewall Software
# Copyright (C) 2003-2005 Jonas Borgström <jonas@edgewall.com>
# Copyright (C) 2005-2006 Christian Boos <cboos@edgewall.org>
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://trac.edgewall.org/wiki/TracLicense.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://trac.edgewall.org/log/.
#
# Author: Jonas Borgström <jonas@edgewall.com>
# Christian Boos <cboos@edgewall.org>
import re
from genshi.core import Markup
from genshi.builder import tag
from trac.config import IntOption, ListOption
from trac.core import *
from trac.perm import IPermissionRequestor
from trac.resource import ResourceNotFound
from trac.util import Ranges
from trac.util.text import to_unicode, wrap
from trac.util.translation import _
from trac.versioncontrol.api import (RepositoryManager, Changeset,
NoSuchChangeset)
from trac.versioncontrol.web_ui.changeset import ChangesetModule
from trac.versioncontrol.web_ui.util import *
from trac.web import IRequestHandler
from trac.web.chrome import (Chrome, INavigationContributor, add_ctxtnav,
add_link, add_script, add_script_data,
add_stylesheet, auth_link, web_context)
from trac.wiki import IWikiSyntaxProvider, WikiParser
class LogModule(Component):
implements(INavigationContributor, IPermissionRequestor, IRequestHandler,
IWikiSyntaxProvider)
default_log_limit = IntOption('revisionlog', 'default_log_limit', 100,
"""Default value for the limit argument in the TracRevisionLog.
(''since 0.11'')""")
graph_colors = ListOption('revisionlog', 'graph_colors',
['#cc0', '#0c0', '#0cc', '#00c', '#c0c', '#c00'],
doc="""Comma-separated list of colors to use for the TracRevisionLog
graph display. (''since 1.0'')""")
# INavigationContributor methods
def get_active_navigation_item(self, req):
return 'browser'
def get_navigation_items(self, req):
return []
# IPermissionRequestor methods
def get_permission_actions(self):
return ['LOG_VIEW']
# IRequestHandler methods
def match_request(self, req):
match = re.match(r'/log(/.*)?$', req.path_info)
if match:
req.args['path'] = match.group(1) or '/'
return True
def process_request(self, req):
req.perm.require('LOG_VIEW')
mode = req.args.get('mode', 'stop_on_copy')
path = req.args.get('path', '/')
rev = req.args.get('rev')
stop_rev = req.args.get('stop_rev')
revs = req.args.get('revs')
format = req.args.get('format')
verbose = req.args.get('verbose')
limit = int(req.args.get('limit') or self.default_log_limit)
rm = RepositoryManager(self.env)
reponame, repos, path = rm.get_repository_by_path(path)
if not repos:
raise ResourceNotFound(_("Repository '%(repo)s' not found",
repo=reponame))
if reponame != repos.reponame: # Redirect alias
qs = req.query_string
req.redirect(req.href.log(repos.reponame or None, path)
+ ('?' + qs if qs else ''))
normpath = repos.normalize_path(path)
# if `revs` parameter is given, then we're restricted to the
# corresponding revision ranges.
# If not, then we're considering all revisions since `rev`,
# on that path, in which case `revranges` will be None.
revranges = None
if revs:
try:
revranges = Ranges(revs)
rev = revranges.b
except ValueError:
pass
rev = unicode(repos.normalize_rev(rev))
display_rev = repos.display_rev
# The `history()` method depends on the mode:
# * for ''stop on copy'' and ''follow copies'', it's `Node.history()`
# unless explicit ranges have been specified
# * for ''show only add, delete'' we're using
# `Repository.get_path_history()`
cset_resource = repos.resource.child('changeset')
show_graph = False
if mode == 'path_history':
def history():
for h in repos.get_path_history(path, rev):
if 'CHANGESET_VIEW' in req.perm(cset_resource(id=h[1])):
yield h
elif revranges:
def history():
prevpath = path
expected_next_item = None
ranges = list(revranges.pairs)
ranges.reverse()
for (a, b) in ranges:
a = repos.normalize_rev(a)
b = repos.normalize_rev(b)
while not repos.rev_older_than(b, a):
node = get_existing_node(req, repos, prevpath, b)
node_history = list(node.get_history(2))
p, rev, chg = node_history[0]
if repos.rev_older_than(rev, a):
break # simply skip, no separator
if 'CHANGESET_VIEW' in req.perm(cset_resource(id=rev)):
if expected_next_item:
# check whether we're continuing previous range
np, nrev, nchg = expected_next_item
if rev != nrev: # no, we need a separator
yield (np, nrev, None)
yield node_history[0]
prevpath = node_history[-1][0] # follow copy
b = repos.previous_rev(rev)
if len(node_history) > 1:
expected_next_item = node_history[-1]
else:
expected_next_item = None
if expected_next_item:
yield (expected_next_item[0], expected_next_item[1], None)
else:
show_graph = path == '/' and not verbose \
and not repos.has_linear_changesets
def history():
node = get_existing_node(req, repos, path, rev)
for h in node.get_history():
if 'CHANGESET_VIEW' in req.perm(cset_resource(id=h[1])):
yield h
# -- retrieve history, asking for limit+1 results
info = []
depth = 1
previous_path = normpath
count = 0
for old_path, old_rev, old_chg in history():
if stop_rev and repos.rev_older_than(old_rev, stop_rev):
break
old_path = repos.normalize_path(old_path)
item = {
'path': old_path, 'rev': old_rev, 'existing_rev': old_rev,
'change': old_chg, 'depth': depth,
}
if old_chg == Changeset.DELETE:
item['existing_rev'] = repos.previous_rev(old_rev, old_path)
if not (mode == 'path_history' and old_chg == Changeset.EDIT):
info.append(item)
if old_path and old_path != previous_path and \
not (mode == 'path_history' and old_path == normpath):
depth += 1
item['depth'] = depth
item['copyfrom_path'] = old_path
if mode == 'stop_on_copy':
break
elif mode == 'path_history':
depth -= 1
if old_chg is None: # separator entry
stop_limit = limit
else:
count += 1
stop_limit = limit + 1
if count >= stop_limit:
break
previous_path = old_path
if info == []:
node = get_existing_node(req, repos, path, rev)
if repos.rev_older_than(stop_rev, node.created_rev):
# FIXME: we should send a 404 error here
raise TracError(_("The file or directory '%(path)s' doesn't "
"exist at revision %(rev)s or at any previous revision.",
path=path, rev=display_rev(rev)), _('Nonexistent path'))
# Generate graph data
graph = {}
if show_graph:
threads, vertices, columns = \
make_log_graph(repos, (item['rev'] for item in info))
graph.update(threads=threads, vertices=vertices, columns=columns,
colors=self.graph_colors,
line_width=0.04, dot_radius=0.1)
add_script(req, 'common/js/excanvas.js', ie_if='IE')
add_script(req, 'common/js/log_graph.js')
add_script_data(req, graph=graph)
def make_log_href(path, **args):
link_rev = rev
if rev == str(repos.youngest_rev):
link_rev = None
params = {'rev': link_rev, 'mode': mode, 'limit': limit}
params.update(args)
if verbose:
params['verbose'] = verbose
return req.href.log(repos.reponame or None, path, **params)
if format in ('rss', 'changelog'):
info = [i for i in info if i['change']] # drop separators
if info and count > limit:
del info[-1]
elif info and count >= limit:
# stop_limit reached, there _might_ be some more
next_rev = info[-1]['rev']
next_path = info[-1]['path']
next_revranges = None
if revranges:
next_revranges = str(revranges.truncate(next_rev))
if next_revranges or not revranges:
older_revisions_href = make_log_href(next_path, rev=next_rev,
revs=next_revranges)
add_link(req, 'next', older_revisions_href,
_('Revision Log (restarting at %(path)s, rev. %(rev)s)',
path=next_path, rev=display_rev(next_rev)))
# only show fully 'limit' results, use `change == None` as a marker
info[-1]['change'] = None
revisions = [i['rev'] for i in info]
changes = get_changes(repos, revisions, self.log)
extra_changes = {}
if format == 'changelog':
for rev in revisions:
changeset = changes[rev]
cs = {}
cs['message'] = wrap(changeset.message, 70,
initial_indent='\t',
subsequent_indent='\t')
files = []
actions = []
for cpath, kind, chg, bpath, brev in changeset.get_changes():
files.append(bpath if chg == Changeset.DELETE else cpath)
actions.append(chg)
cs['files'] = files
cs['actions'] = actions
extra_changes[rev] = cs
data = {
'context': web_context(req, 'source', path, parent=repos.resource),
'reponame': repos.reponame or None, 'repos': repos,
'path': path, 'rev': rev, 'stop_rev': stop_rev,
'display_rev': display_rev, 'revranges': revranges,
'mode': mode, 'verbose': verbose, 'limit' : limit,
'items': info, 'changes': changes, 'extra_changes': extra_changes,
'graph': graph,
'wiki_format_messages':
self.config['changeset'].getbool('wiki_format_messages')
}
if format == 'changelog':
return 'revisionlog.txt', data, 'text/plain'
elif format == 'rss':
data['email_map'] = Chrome(self.env).get_email_map()
data['context'] = web_context(req, 'source',
path, parent=repos.resource,
absurls=True)
return 'revisionlog.rss', data, 'application/rss+xml'
item_ranges = []
range = []
for item in info:
if item['change'] is None: # separator
if range: # start new range
range.append(item)
item_ranges.append(range)
range = []
else:
range.append(item)
if range:
item_ranges.append(range)
data['item_ranges'] = item_ranges
add_stylesheet(req, 'common/css/diff.css')
add_stylesheet(req, 'common/css/browser.css')
path_links = get_path_links(req.href, repos.reponame, path, rev)
if path_links:
data['path_links'] = path_links
if path != '/':
add_link(req, 'up', path_links[-2]['href'], _('Parent directory'))
rss_href = make_log_href(path, format='rss', revs=revs,
stop_rev=stop_rev)
add_link(req, 'alternate', auth_link(req, rss_href), _('RSS Feed'),
'application/rss+xml', 'rss')
changelog_href = make_log_href(path, format='changelog', revs=revs,
stop_rev=stop_rev)
add_link(req, 'alternate', changelog_href, _('ChangeLog'), 'text/plain')
add_ctxtnav(req, _('View Latest Revision'),
href=req.href.browser(repos.reponame or None, path))
if 'next' in req.chrome['links']:
next = req.chrome['links']['next'][0]
add_ctxtnav(req, tag.span(tag.a(_('Older Revisions'),
href=next['href']),
Markup(' →')))
return 'revisionlog.html', data, None
# IWikiSyntaxProvider methods
REV_RANGE = r"(?:%s|%s)" % (Ranges.RE_STR, ChangesetModule.CHANGESET_ID)
# int rev ranges or any kind of rev
def get_wiki_syntax(self):
yield (
# [...] form, starts with optional intertrac: [T... or [trac ...
r"!?\[(?P<it_log>%s\s*)" % WikiParser.INTERTRAC_SCHEME +
# <from>:<to> + optional path restriction
r"(?P<log_revs>%s)(?P<log_path>[/?][^\]]*)?\]" % self.REV_RANGE,
lambda x, y, z: self._format_link(x, 'log1', y[1:-1], y, z))
yield (
# r<from>:<to> form + optional path restriction (no intertrac)
r"(?:\b|!)r%s\b(?:/[a-zA-Z0-9_/+-]+)?" % Ranges.RE_STR,
lambda x, y, z: self._format_link(x, 'log2', '@' + y[1:], y))
    def get_link_resolvers(self):
        # `log:` links are rendered by `_format_link` (with ns == 'log')
        yield ('log', self._format_link)
    def _format_link(self, formatter, ns, match, label, fullmatch=None):
        """Render a revision log link to HTML.

        `ns` selects how `match` is interpreted:
         - 'log1': bracketed wiki form; parts come from the named groups
           of `fullmatch` (`it_log`, `log_revs`, `log_path`)
         - 'log2': `r<revs>` shorthand; `match` is '@<revs>' + optional path
         - 'log':  explicit `log:` link resolver form

        Returns a `tag.a` pointing at the revision log, or a
        "missing source" link carrying an error message.
        """
        if ns == 'log1':
            groups = fullmatch.groupdict()
            it_log = groups.get('it_log')
            revs = groups.get('log_revs')
            path = groups.get('log_path') or '/'
            target = '%s%s@%s' % (it_log, path, revs)
            # prepending it_log is needed, as the helper expects it there
            intertrac = formatter.shorthand_intertrac_helper(
                'log', target, label, fullmatch)
            if intertrac:
                return intertrac
            path, query, fragment = formatter.split_link(path)
        else:
            assert ns in ('log', 'log2')
            if ns == 'log':
                match, query, fragment = formatter.split_link(match)
            else:
                query = fragment = ''
                # move the optional path in front:
                # '@<revs>/<path>' -> '<path>@<revs>'
                match = ''.join(reversed(match.split('/', 1)))
            path = match
            revs = ''
            if self.LOG_LINK_RE.match(match):
                # find the first ':' or '@' separator; `sep in match and
                # match.index(sep)` evaluates to False when absent
                indexes = [sep in match and match.index(sep) for sep in ':@']
                idx = min([i for i in indexes if i is not False])
                path, revs = match[:idx], match[idx+1:]

        rm = RepositoryManager(self.env)
        try:
            reponame, repos, path = rm.get_repository_by_path(path)
            if not reponame:
                # fall back to the context's default repository
                reponame = rm.get_default_repository(formatter.context)
                if reponame is not None:
                    repos = rm.get_repository(reponame)

            if repos:
                revranges = None
                if any(c for c in ':-,' if c in revs):
                    # multiple revisions/ranges were requested
                    revranges = self._normalize_ranges(repos, path, revs)
                    revs = None
                if 'LOG_VIEW' in formatter.perm:
                    if revranges:
                        href = formatter.href.log(repos.reponame or None,
                                                  path or '/',
                                                  revs=str(revranges))
                    else:
                        try:
                            rev = repos.normalize_rev(revs)
                        except NoSuchChangeset:
                            rev = None
                        href = formatter.href.log(repos.reponame or None,
                                                  path or '/', rev=rev)
                    if query and (revranges or revs):
                        # the href already carries a query string
                        query = '&' + query[1:]
                    return tag.a(label, class_='source',
                                 href=href + query + fragment)
                errmsg = _("No permission to view change log")
            elif reponame:
                errmsg = _("Repository '%(repo)s' not found", repo=reponame)
            else:
                errmsg = _("No default repository defined")
        except TracError, e:
            errmsg = to_unicode(e)
        return tag.a(label, class_='missing source', title=errmsg)
LOG_LINK_RE = re.compile(r"([^@:]*)[@:]%s?" % REV_RANGE)
def _normalize_ranges(self, repos, path, revs):
ranges = revs.replace(':', '-')
try:
# fast path; only numbers
return Ranges(ranges, reorder=True)
except ValueError:
# slow path, normalize each rev
splitted_ranges = re.split(r'([-,])', ranges)
try:
revs = [repos.normalize_rev(r) for r in splitted_ranges[::2]]
except NoSuchChangeset:
return None
seps = splitted_ranges[1::2] + ['']
ranges = ''.join([str(rev)+sep for rev, sep in zip(revs, seps)])
return Ranges(ranges)
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/versioncontrol/web_ui/main.py | trac/trac/versioncontrol/web_ui/main.py | # -*- coding: utf-8 -*-
#
# Copyright (C) 2007-2009 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://trac.edgewall.org/wiki/TracLicense.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://trac.edgewall.org/log/.
import pkg_resources
from trac.core import *
from trac.web.chrome import ITemplateProvider
class VersionControlUI(Component):
    """Provides the templates shipped with `trac.versioncontrol`."""

    implements(ITemplateProvider)

    # ITemplateProvider methods

    def get_htdocs_dirs(self):
        # no static resources of its own
        return []

    def get_templates_dirs(self):
        template_dir = pkg_resources.resource_filename('trac.versioncontrol',
                                                       'templates')
        return [template_dir]
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/versioncontrol/web_ui/browser.py | trac/trac/versioncontrol/web_ui/browser.py | # -*- coding: utf-8 -*-
#
# Copyright (C) 2003-2010 Edgewall Software
# Copyright (C) 2003-2005 Jonas Borgström <jonas@edgewall.com>
# Copyright (C) 2005-2007 Christian Boos <cboos@edgewall.org>
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://trac.edgewall.org/wiki/TracLicense.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://trac.edgewall.org/log/.
#
# Author: Jonas Borgström <jonas@edgewall.com>
from datetime import datetime, timedelta
from fnmatch import fnmatchcase
import re
from genshi.builder import tag
from trac.config import ListOption, BoolOption, Option
from trac.core import *
from trac.mimeview.api import IHTMLPreviewAnnotator, Mimeview, is_binary
from trac.perm import IPermissionRequestor
from trac.resource import Resource, ResourceNotFound
from trac.util import as_bool, embedded_numbers
from trac.util.compat import cleandoc
from trac.util.datefmt import http_date, to_datetime, utc
from trac.util.html import escape, Markup
from trac.util.text import exception_to_unicode, shorten_line
from trac.util.translation import _, cleandoc_
from trac.web import IRequestHandler, RequestDone
from trac.web.chrome import (INavigationContributor, add_ctxtnav, add_link,
add_script, add_stylesheet, prevnext_nav,
web_context)
from trac.wiki.api import IWikiSyntaxProvider, IWikiMacroProvider, parse_args
from trac.wiki.formatter import format_to_html, format_to_oneliner
from ..api import NoSuchChangeset, RepositoryManager
from trac.versioncontrol.web_ui.util import * # `from .util import *` FIXME 2.6
# Read size used when sniffing file content for MIME detection and when
# streaming raw file content to the client (see `_render_file`).
CHUNK_SIZE = 4096
class IPropertyRenderer(Interface):
    """Render node properties in TracBrowser and TracChangeset views."""

    def match_property(name, mode):
        """Indicate whether this renderer can treat the given property.

        `mode` is the current rendering context, which can be:
        - 'browser' rendered in the browser view
        - 'changeset' rendered in the changeset view as a node property
        - 'revprop' rendered in the changeset view as a revision property

        Other identifiers might be used by plugins, so it's advised to
        simply ignore unknown modes.

        Returns a quality number, ranging from 0 (unsupported) to 9
        (''perfect'' match).
        """

    def render_property(name, mode, context, props):
        """Render the given property.

        `name` is the property name as given to `match_property()`,
        `mode` is the same as for `match_property()`,
        `context` is the context for the node being rendered
        (useful when the rendering depends on the node kind) and
        `props` is the collection of the corresponding properties
        (i.e. the `node.get_properties()`).

        The rendered result can be one of the following:
        - `None`: the property will be skipped
        - an `unicode` value: the property will be displayed as text
        - a `RenderedProperty` instance: the property will only be
          displayed using the instance's `content` attribute, and the
          other attributes will also be used in some display contexts
          (like `revprop`)
        - `Markup` or other Genshi content: the property will be
          displayed normally, using that content as a block-level markup
        """
class RenderedProperty(object):
    """Rendering result returned by an `IPropertyRenderer`.

    Wraps the rendered `content` together with an optional display name
    and extra attributes for both (see
    `IPropertyRenderer.render_property`).
    """
    def __init__(self, name=None, name_attributes=None,
                 content=None, content_attributes=None):
        # optional display name for the property
        self.name = name
        # extra markup attributes for the name (usage depends on the view)
        self.name_attributes = name_attributes
        # the rendered value itself
        self.content = content
        # extra markup attributes for the content
        self.content_attributes = content_attributes
class DefaultPropertyRenderer(Component):
    """Default version control property renderer."""

    implements(IPropertyRenderer)

    def match_property(self, name, mode):
        # lowest non-zero quality: any property, beaten by any specialist
        return 1

    def render_property(self, name, mode, context, props):
        # No special treatment besides respecting newlines in values.
        value = props[name]
        if not value or '\n' not in value:
            return value
        pieces = value.split('\n')
        return Markup(''.join(['<br />%s' % escape(piece)
                               for piece in pieces]))
class WikiPropertyRenderer(Component):
"""Wiki text property renderer."""
implements(IPropertyRenderer)
wiki_properties = ListOption('browser', 'wiki_properties',
'trac:description',
doc="""Comma-separated list of version control properties to render
as wiki content in the repository browser.
(''since 0.11'')""")
oneliner_properties = ListOption('browser', 'oneliner_properties',
'trac:summary',
doc="""Comma-separated list of version control properties to render
as oneliner wiki content in the repository browser.
(''since 0.11'')""")
    def match_property(self, name, mode):
        # Quality 4 for configured property names beats the default
        # renderer (quality 1); everything else is declined.
        return 4 if name in self.wiki_properties \
               or name in self.oneliner_properties else 0

    def render_property(self, name, mode, context, props):
        # Full block-level wiki rendering for `wiki_properties`, single
        # line rendering otherwise (i.e. for `oneliner_properties`).
        if name in self.wiki_properties:
            return format_to_html(self.env, context, props[name])
        else:
            return format_to_oneliner(self.env, context, props[name])
class TimeRange(object):
    """Tracks the oldest and newest timestamps seen, and maps a datetime
    to a relative age within that range (0.0 == oldest, 1.0 == newest).
    """

    min = datetime(1, 1, 1, 0, 0, 0, 0, utc) # tz aware version of datetime.min

    def __init__(self, base):
        self.oldest = self.newest = base
        self._total = None  # cached span in seconds, lazily computed

    def seconds_between(self, dt1, dt2):
        """Number of seconds elapsed from `dt2` to `dt1`."""
        delta = dt1 - dt2
        return delta.days * 24 * 3600 + delta.seconds

    def to_seconds(self, dt):
        """Serialize `dt` as seconds since `TimeRange.min`."""
        return self.seconds_between(dt, TimeRange.min)

    def from_seconds(self, secs):
        """Inverse of `to_seconds`."""
        days, remainder = divmod(secs, 24 * 3600)
        return TimeRange.min + timedelta(days, remainder)

    def relative(self, dt):
        """Relative age of `dt` within [oldest, newest], in [0.0, 1.0]."""
        if self._total is None:
            self._total = float(self.seconds_between(self.newest,
                                                     self.oldest))
        if not self._total:
            # degenerate range (oldest == newest): everything is "newest"
            return 1.0
        return self.seconds_between(dt, self.oldest) / self._total

    def insert(self, dt):
        """Widen the range so that it includes `dt`."""
        self._total = None  # invalidate the cached span
        if dt < self.oldest:
            self.oldest = dt
        if dt > self.newest:
            self.newest = dt
class BrowserModule(Component):
    """Repository browser: renders the repository index, directory
    listings and file content views (see `process_request`)."""

    implements(INavigationContributor, IPermissionRequestor, IRequestHandler,
               IWikiSyntaxProvider, IHTMLPreviewAnnotator,
               IWikiMacroProvider)

    # pluggable renderers for versioned node properties
    property_renderers = ExtensionPoint(IPropertyRenderer)
downloadable_paths = ListOption('browser', 'downloadable_paths',
'/trunk, /branches/*, /tags/*',
doc="""List of repository paths that can be downloaded.
Leave this option empty if you want to disable all downloads, otherwise
set it to a comma-separated list of authorized paths (those paths are
glob patterns, i.e. "*" can be used as a wild card). In a
multi-repository environment, the path must be qualified with the
repository name if the path does not point to the default repository
(e.g. /reponame/trunk). Note that a simple prefix matching is
performed on the paths, so aliases won't get automatically resolved.
(''since 0.10'')""")
color_scale = BoolOption('browser', 'color_scale', True,
doc="""Enable colorization of the ''age'' column.
This uses the same color scale as the source code annotation:
blue is older, red is newer.
(''since 0.11'')""")
NEWEST_COLOR = (255, 136, 136)
newest_color = Option('browser', 'newest_color', repr(NEWEST_COLOR),
doc="""(r,g,b) color triple to use for the color corresponding
to the newest color, for the color scale used in ''blame'' or
the browser ''age'' column if `color_scale` is enabled.
(''since 0.11'')""")
OLDEST_COLOR = (136, 136, 255)
oldest_color = Option('browser', 'oldest_color', repr(OLDEST_COLOR),
doc="""(r,g,b) color triple to use for the color corresponding
to the oldest color, for the color scale used in ''blame'' or
the browser ''age'' column if `color_scale` is enabled.
(''since 0.11'')""")
intermediate_point = Option('browser', 'intermediate_point', '',
doc="""If set to a value between 0 and 1 (exclusive), this will be the
point chosen to set the `intermediate_color` for interpolating
the color value.
(''since 0.11'')""")
intermediate_color = Option('browser', 'intermediate_color', '',
doc="""(r,g,b) color triple to use for the color corresponding
to the intermediate color, if two linear interpolations are used
for the color scale (see `intermediate_point`).
If not set, the intermediate color between `oldest_color` and
`newest_color` will be used.
(''since 0.11'')""")
render_unsafe_content = BoolOption('browser', 'render_unsafe_content',
'false',
"""Whether raw files should be rendered in the browser, or only made
downloadable.
Pretty much any file may be interpreted as HTML by the browser,
which allows a malicious user to create a file containing cross-site
scripting attacks.
For open repositories where anyone can check-in a file, it is
recommended to leave this option disabled (which is the default).""")
hidden_properties = ListOption('browser', 'hide_properties', 'svk:merge',
doc="""Comma-separated list of version control properties to hide from
the repository browser.
(''since 0.9'')""")
# public methods
def get_custom_colorizer(self):
"""Returns a converter for values from [0.0, 1.0] to a RGB triple."""
def interpolate(old, new, value):
# Provides a linearly interpolated color triple for `value`
# which must be a floating point value between 0.0 and 1.0
return tuple([int(b + (a - b) * value) for a, b in zip(new, old)])
def parse_color(rgb, default):
# Get three ints out of a `rgb` string or return `default`
try:
t = tuple([int(v) for v in re.split(r'(\d+)', rgb)[1::2]])
return t if len(t) == 3 else default
except ValueError:
return default
newest_color = parse_color(self.newest_color, self.NEWEST_COLOR)
oldest_color = parse_color(self.oldest_color, self.OLDEST_COLOR)
try:
intermediate = float(self.intermediate_point)
except ValueError:
intermediate = None
if intermediate:
intermediate_color = parse_color(self.intermediate_color, None)
if not intermediate_color:
intermediate_color = tuple([(a + b) / 2 for a, b in
zip(newest_color, oldest_color)])
def colorizer(value):
if value <= intermediate:
value = value / intermediate
return interpolate(oldest_color, intermediate_color, value)
else:
value = (value - intermediate) / (1.0 - intermediate)
return interpolate(intermediate_color, newest_color, value)
else:
def colorizer(value):
return interpolate(oldest_color, newest_color, value)
return colorizer
    # INavigationContributor methods

    def get_active_navigation_item(self, req):
        # highlight the "Browse Source" entry in the main navigation
        return 'browser'

    def get_navigation_items(self, req):
        rm = RepositoryManager(self.env)
        # Only advertise the browser when the user may view it and at
        # least one real repository is configured.
        if 'BROWSER_VIEW' in req.perm and rm.get_real_repositories():
            yield ('mainnav', 'browser',
                   tag.a(_('Browse Source'), href=req.href.browser()))

    # IPermissionRequestor methods

    def get_permission_actions(self):
        # BROWSER_VIEW guards listings, FILE_VIEW file content
        return ['BROWSER_VIEW', 'FILE_VIEW']
# IRequestHandler methods
def match_request(self, req):
match = re.match(r'/(export|browser|file)(/.*)?$', req.path_info)
if match:
mode, path = match.groups()
if mode == 'export':
if path and '/' in path:
path_elts = path.split('/', 2)
if len(path_elts) != 3:
return False
path = path_elts[2]
req.args['rev'] = path_elts[1]
req.args['format'] = 'raw'
elif mode == 'file':
req.redirect(req.href.browser(path, rev=req.args.get('rev'),
format=req.args.get('format')),
permanent=True)
req.args['path'] = path or '/'
return True
    def process_request(self, req):
        """Render the repository index, a directory listing or a file.

        Returns the `browser.html` template (or `dir_entries.html` for
        XHR requests) together with its data dict.
        """
        req.perm.require('BROWSER_VIEW')

        # honor an entry preselected on the repository index page
        presel = req.args.get('preselected')
        if presel and (presel + '/').startswith(req.href.browser() + '/'):
            req.redirect(presel)

        path = req.args.get('path', '/')
        rev = req.args.get('rev', '')
        # '' and 'head' both mean the latest revision, kept as `None`
        if rev.lower() in ('', 'head'):
            rev = None
        order = req.args.get('order', 'name').lower()
        desc = req.args.has_key('desc')
        xhr = req.get_header('X-Requested-With') == 'XMLHttpRequest'

        rm = RepositoryManager(self.env)
        all_repositories = rm.get_all_repositories()
        reponame, repos, path = rm.get_repository_by_path(path)

        # Repository index
        show_index = not reponame and path == '/'
        if show_index:
            # hide the default repository on the index when it is marked
            # hidden or not viewable by the current user
            if repos and (as_bool(all_repositories[''].get('hidden'))
                          or not repos.is_viewable(req.perm)):
                repos = None

        if not repos and reponame:
            raise ResourceNotFound(_("Repository '%(repo)s' not found",
                                     repo=reponame))

        if reponame and reponame != repos.reponame: # Redirect alias
            qs = req.query_string
            req.redirect(req.href.browser(repos.reponame or None, path)
                         + ('?' + qs if qs else ''))
        reponame = repos.reponame if repos else None

        # Find node for the requested path/rev
        context = web_context(req)
        node = None
        display_rev = lambda rev: rev
        if repos:
            try:
                if rev:
                    rev = repos.normalize_rev(rev)
                # If `rev` is `None`, we'll try to reuse `None` consistently,
                # as a special shortcut to the latest revision.
                rev_or_latest = rev or repos.youngest_rev
                node = get_existing_node(req, repos, path, rev_or_latest)
            except NoSuchChangeset, e:
                raise ResourceNotFound(e.message,
                                       _('Invalid changeset number'))
            context = context.child(repos.resource.child('source', path,
                                                   version=rev_or_latest))
            display_rev = repos.display_rev

        # Prepare template data
        path_links = get_path_links(req.href, reponame, path, rev,
                                    order, desc)

        repo_data = dir_data = file_data = None
        if show_index:
            repo_data = self._render_repository_index(
                                        context, all_repositories, order, desc)
        if node:
            if node.isdir:
                dir_data = self._render_dir(req, repos, node, rev, order, desc)
            elif node.isfile:
                file_data = self._render_file(req, context, repos, node, rev)

        if not repos and not (repo_data and repo_data['repositories']):
            raise ResourceNotFound(_("No node %(path)s", path=path))

        quickjump_data = properties_data = None
        if node and not xhr:
            properties_data = self.render_properties(
                'browser', context, node.get_properties())
            quickjump_data = list(repos.get_quickjump_entries(rev))

        data = {
            'context': context, 'reponame': reponame, 'repos': repos,
            'repoinfo': all_repositories.get(reponame or ''),
            'path': path, 'rev': node and node.rev, 'stickyrev': rev,
            'display_rev': display_rev,
            'created_path': node and node.created_path,
            'created_rev': node and node.created_rev,
            'properties': properties_data,
            'path_links': path_links,
            'order': order, 'desc': 1 if desc else None,
            'repo': repo_data, 'dir': dir_data, 'file': file_data,
            'quickjump_entries': quickjump_data,
            'wiki_format_messages': \
                self.config['changeset'].getbool('wiki_format_messages'),
            'xhr': xhr,
        }
        if xhr: # render and return the content only
            return 'dir_entries.html', data, None

        if dir_data or repo_data:
            add_script(req, 'common/js/expand_dir.js')
            add_script(req, 'common/js/keyboard_nav.js')

        # Links for contextual navigation
        if node:
            if node.isfile:
                prev_rev = repos.previous_rev(rev=node.created_rev,
                                              path=node.created_path)
                if prev_rev:
                    href = req.href.browser(reponame,
                                            node.created_path, rev=prev_rev)
                    add_link(req, 'prev', href,
                             _('Revision %(num)s', num=display_rev(prev_rev)))
                if rev is not None:
                    add_link(req, 'up', req.href.browser(reponame,
                                                         node.created_path))
                next_rev = repos.next_rev(rev=node.created_rev,
                                          path=node.created_path)
                if next_rev:
                    href = req.href.browser(reponame, node.created_path,
                                            rev=next_rev)
                    add_link(req, 'next', href,
                             _('Revision %(num)s', num=display_rev(next_rev)))
                prevnext_nav(req, _('Previous Revision'), _('Next Revision'),
                             _('Latest Revision'))
            else:
                if path != '/':
                    add_link(req, 'up', path_links[-2]['href'],
                             _('Parent directory'))
                add_ctxtnav(req, tag.a(_('Last Change'),
                            href=req.href.changeset(node.created_rev, reponame,
                                                    node.created_path)))
            if node.isfile:
                annotate = data['file']['annotate']
                if annotate:
                    add_ctxtnav(req, _('Normal'),
                                title=_('View file without annotations'),
                                href=req.href.browser(reponame,
                                                      node.created_path,
                                                      rev=rev))
                if annotate != 'blame':
                    add_ctxtnav(req, _('Blame'),
                                title=_('Annotate each line with the last '
                                        'changed revision '
                                        '(this can be time consuming...)'),
                                href=req.href.browser(reponame,
                                                      node.created_path,
                                                      rev=rev,
                                                      annotate='blame'))
            add_ctxtnav(req, _('Revision Log'),
                        href=req.href.log(reponame, path, rev=rev))
            path_url = repos.get_path_url(path, rev)
            if path_url:
                # scheme-relative URLs get the request's scheme prepended
                if path_url.startswith('//'):
                    path_url = req.scheme + ':' + path_url
                add_ctxtnav(req, _('Repository URL'), href=path_url)

        add_stylesheet(req, 'common/css/browser.css')
        return 'browser.html', data, None
# Internal methods
    def _render_repository_index(self, context, all_repositories, order, desc):
        """Build template data for the repository index page.

        Returns a dict with the sorted repository entries, the
        `TimeRange` spanning their youngest changesets and the age
        colorizer (the latter two are `None` unless `color_scale` is on).
        """
        # Color scale for the age column
        timerange = custom_colorizer = None
        if self.color_scale:
            custom_colorizer = self.get_custom_colorizer()

        rm = RepositoryManager(self.env)
        repositories = []
        for reponame, repoinfo in all_repositories.iteritems():
            # skip the unnamed default repository and hidden ones
            if not reponame or as_bool(repoinfo.get('hidden')):
                continue
            try:
                repos = rm.get_repository(reponame)
                if repos:
                    if not repos.is_viewable(context.perm):
                        continue
                    try:
                        youngest = repos.get_changeset(repos.youngest_rev)
                    except NoSuchChangeset:
                        youngest = None
                    if self.color_scale and youngest:
                        if not timerange:
                            timerange = TimeRange(youngest.date)
                        else:
                            timerange.insert(youngest.date)
                    raw_href = self._get_download_href(context.href, repos,
                                                       None, None)
                    # entry layout: (name, info, repos, youngest changeset,
                    # error message, download href)
                    entry = (reponame, repoinfo, repos, youngest, None,
                             raw_href)
                else:
                    entry = (reponame, repoinfo, None, None, u"\u2013", None)
            except TracError, err:
                entry = (reponame, repoinfo, None, None,
                         exception_to_unicode(err), None)
            # NOTE(review): entry[-1] is the download href, but the comment
            # below talks about the error case (entry[4]) — confirm which
            # field was intended here.
            if entry[-1] is not None:   # Check permission in case of error
                root = Resource('repository', reponame).child('source', '/')
                if 'BROWSER_VIEW' not in context.perm(root):
                    continue
            repositories.append(entry)

        # Ordering of repositories
        if order == 'date':
            def repo_order((reponame, repoinfo, repos, youngest, err, href)):
                return (youngest.date if youngest else to_datetime(0),
                        embedded_numbers(reponame.lower()))
        elif order == 'author':
            def repo_order((reponame, repoinfo, repos, youngest, err, href)):
                return (youngest.author.lower() if youngest else '',
                        embedded_numbers(reponame.lower()))
        else:
            def repo_order((reponame, repoinfo, repos, youngest, err, href)):
                return embedded_numbers(reponame.lower())

        repositories = sorted(repositories, key=repo_order, reverse=desc)

        return {'repositories' : repositories,
                'timerange': timerange, 'colorize_age': custom_colorizer}
    def _render_dir(self, req, repos, node, rev, order, desc):
        """Build template data for the directory listing of `node`."""
        req.perm(node.resource).require('BROWSER_VIEW')
        download_href = self._get_download_href

        # Entries metadata
        class entry(object):
            # lightweight snapshot of the node attributes the template
            # needs, plus a per-entry download link
            _copy = 'name rev created_rev kind isdir path content_length' \
                    .split()
            __slots__ = _copy + ['raw_href']

            def __init__(self, node):
                for f in entry._copy:
                    setattr(self, f, getattr(node, f))
                self.raw_href = download_href(req.href, repos, node, rev)

        entries = [entry(n) for n in node.get_entries()
                   if n.is_viewable(req.perm)]
        changes = get_changes(repos, [i.created_rev for i in entries],
                              self.log)

        if rev:
            newest = repos.get_changeset(rev).date
        else:
            newest = datetime.now(req.tz)

        # Color scale for the age column
        timerange = custom_colorizer = None
        if self.color_scale:
            timerange = TimeRange(newest)
            # include bounds passed in the request (presumably from the
            # parent listing via the expand_dir XHR — TODO confirm) so
            # colors stay consistent across nesting levels
            max_s = req.args.get('range_max_secs')
            min_s = req.args.get('range_min_secs')
            parent_range = [timerange.from_seconds(long(s))
                            for s in [max_s, min_s] if s]
            this_range = [c.date for c in changes.values() if c]
            for dt in this_range + parent_range:
                timerange.insert(dt)
            custom_colorizer = self.get_custom_colorizer()

        # Ordering of entries
        if order == 'date':
            def file_order(a):
                return (changes[a.created_rev].date,
                        embedded_numbers(a.name.lower()))
        elif order == 'size':
            def file_order(a):
                return (a.content_length,
                        embedded_numbers(a.name.lower()))
        elif order == 'author':
            def file_order(a):
                return (changes[a.created_rev].author.lower(),
                        embedded_numbers(a.name.lower()))
        else:
            def file_order(a):
                return embedded_numbers(a.name.lower())

        dir_order = 1 if desc else -1

        # directories always sort before files, whatever the direction
        def browse_order(a):
            return dir_order if a.isdir else 0, file_order(a)
        entries = sorted(entries, key=browse_order, reverse=desc)

        # ''Zip Archive'' alternate link
        zip_href = self._get_download_href(req.href, repos, node, rev)
        if zip_href:
            add_link(req, 'alternate', zip_href, _('Zip Archive'),
                     'application/zip', 'zip')

        return {'entries': entries, 'changes': changes,
                'timerange': timerange, 'colorize_age': custom_colorizer,
                'range_max_secs': (timerange and
                                   timerange.to_seconds(timerange.newest)),
                'range_min_secs': (timerange and
                                   timerange.to_seconds(timerange.oldest)),
                }
    def _render_file(self, req, context, repos, node, rev=None):
        """Render a file node.

        Either streams the raw content directly to the client (for
        `format=raw` / `format=txt`, terminating via `RequestDone`) or
        returns the preview data dict for the HTML view.
        """
        req.perm(node.resource).require('FILE_VIEW')

        mimeview = Mimeview(self.env)

        # MIME type detection
        content = node.get_content()
        chunk = content.read(CHUNK_SIZE)
        mime_type = node.content_type
        if not mime_type or mime_type == 'application/octet-stream':
            mime_type = mimeview.get_mimetype(node.name, chunk) or \
                        mime_type or 'text/plain'

        # Eventually send the file directly
        format = req.args.get('format')
        if format in ('raw', 'txt'):
            req.send_response(200)
            req.send_header('Content-Type',
                            'text/plain' if format == 'txt' else mime_type)
            req.send_header('Content-Length', node.content_length)
            req.send_header('Last-Modified', http_date(node.last_modified))
            if rev is None:
                # "latest revision" changes over time: forbid caching
                req.send_header('Pragma', 'no-cache')
                req.send_header('Cache-Control', 'no-cache')
                req.send_header('Expires', 'Fri, 01 Jan 1999 00:00:00 GMT')
            if not self.render_unsafe_content:
                # Force browser to download files instead of rendering
                # them, since they might contain malicious code enabling
                # XSS attacks
                req.send_header('Content-Disposition', 'attachment')
            req.end_headers()

            # stream the already-read chunk, then the rest
            while 1:
                if not chunk:
                    raise RequestDone
                req.write(chunk)
                chunk = content.read(CHUNK_SIZE)
        else:
            # The changeset corresponding to the last change on `node`
            # is more interesting than the `rev` changeset.
            changeset = repos.get_changeset(node.created_rev)

            # add ''Plain Text'' alternate link if needed
            if not is_binary(chunk) and mime_type != 'text/plain':
                plain_href = req.href.browser(repos.reponame or None,
                                              node.path, rev=rev, format='txt')
                add_link(req, 'alternate', plain_href, _('Plain Text'),
                         'text/plain')

            # add ''Original Format'' alternate link (always)
            raw_href = req.href.export(rev or repos.youngest_rev,
                                       repos.reponame or None, node.path)
            add_link(req, 'alternate', raw_href, _('Original Format'),
                     mime_type)

            self.log.debug("Rendering preview of node %s@%s with mime-type %s"
                           % (node.name, str(rev), mime_type))

            del content # the remainder of that content is not needed

            add_stylesheet(req, 'common/css/code.css')

            annotations = ['lineno']
            annotate = req.args.get('annotate')
            if annotate:
                annotations.insert(0, annotate)
            preview_data = mimeview.preview_data(context, node.get_content(),
                                                 node.get_content_length(),
                                                 mime_type, node.created_path,
                                                 raw_href,
                                                 annotations=annotations,
                                                 force_source=bool(annotate))
            return {
                'changeset': changeset,
                'size': node.content_length,
                'preview': preview_data,
                'annotate': annotate,
                }
def _get_download_href(self, href, repos, node, rev):
"""Return the URL for downloading a file, or a directory as a ZIP."""
if node is not None and node.isfile:
return href.export(rev or 'HEAD', repos.reponame or None,
node.path)
path = npath = '' if node is None else node.path.strip('/')
if repos.reponame:
path = (repos.reponame + '/' + npath).rstrip('/')
if any(fnmatchcase(path, p.strip('/'))
for p in self.downloadable_paths):
return href.changeset(rev or repos.youngest_rev,
repos.reponame or None, npath,
old=rev, old_path=repos.reponame or '/',
format='zip')
# public methods
def render_properties(self, mode, context, props):
"""Prepare rendering of a collection of properties."""
return filter(None, [self.render_property(name, mode, context, props)
for name in sorted(props)])
def render_property(self, name, mode, context, props):
"""Renders a node property to HTML."""
if name in self.hidden_properties:
return
candidates = []
for renderer in self.property_renderers:
quality = renderer.match_property(name, mode)
if quality > 0:
candidates.append((quality, renderer))
candidates.sort(reverse=True)
for (quality, renderer) in candidates:
try:
rendered = renderer.render_property(name, mode, context, props)
if not rendered:
return rendered
if isinstance(rendered, RenderedProperty):
value = rendered.content
else:
value = rendered
rendered = None
prop = {'name': name, 'value': value, 'rendered': rendered}
return prop
except Exception, e:
self.log.warning('Rendering failed for property %s with '
'renderer %s: %s', name,
renderer.__class__.__name__,
exception_to_unicode(e, traceback=True))
# IWikiSyntaxProvider methods
    def get_wiki_syntax(self):
        # IWikiSyntaxProvider: no extra inline syntax patterns contributed;
        # only the link resolvers below are provided.
        return []
def get_link_resolvers(self):
"""TracBrowser link resolvers.
- `source:` and `browser:`
* simple paths (/dir/file)
* paths at a given revision (/dir/file@234)
* paths with line number marks (/dir/file@234:10,20-30)
* paths with line number anchor (/dir/file@234#L100)
Marks and anchor can be combined.
The revision must be present when specifying line numbers.
In the few cases where it would be redundant (e.g. for tags), the
revision number itself can be omitted: /tags/v10/file@100-110#L99
"""
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | true |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/versioncontrol/web_ui/__init__.py | trac/trac/versioncontrol/web_ui/__init__.py | from trac.versioncontrol.web_ui.browser import *
from trac.versioncontrol.web_ui.changeset import *
from trac.versioncontrol.web_ui.log import *
from trac.versioncontrol.web_ui.main import *
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/versioncontrol/web_ui/changeset.py | trac/trac/versioncontrol/web_ui/changeset.py | # -*- coding: utf-8 -*-
#
# Copyright (C) 2003-2009 Edgewall Software
# Copyright (C) 2003-2005 Jonas Borgström <jonas@edgewall.com>
# Copyright (C) 2004-2006 Christopher Lenz <cmlenz@gmx.de>
# Copyright (C) 2005-2006 Christian Boos <cboos@edgewall.org>
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://trac.edgewall.org/wiki/TracLicense.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://trac.edgewall.org/log/.
#
# Author: Jonas Borgström <jonas@edgewall.com>
# Christopher Lenz <cmlenz@gmx.de>
# Christian Boos <cboos@edgewall.org>
from __future__ import with_statement
from itertools import groupby
import os
import posixpath
import re
from StringIO import StringIO
from genshi.builder import tag
from trac.config import Option, BoolOption, IntOption
from trac.core import *
from trac.mimeview.api import Mimeview
from trac.perm import IPermissionRequestor
from trac.resource import Resource, ResourceNotFound
from trac.search import ISearchSource, search_to_sql, shorten_result
from trac.timeline.api import ITimelineEventProvider
from trac.util import as_bool, content_disposition, embedded_numbers, pathjoin
from trac.util.datefmt import from_utimestamp, pretty_timedelta
from trac.util.text import exception_to_unicode, to_unicode, \
unicode_urlencode, shorten_line, CRLF
from trac.util.translation import _, ngettext
from trac.versioncontrol.api import RepositoryManager, Changeset, Node, \
NoSuchChangeset
from trac.versioncontrol.diff import get_diff_options, diff_blocks, \
unified_diff
from trac.versioncontrol.web_ui.browser import BrowserModule
from trac.web import IRequestHandler, RequestDone
from trac.web.chrome import (Chrome, INavigationContributor, add_ctxtnav,
add_link, add_script, add_stylesheet,
prevnext_nav, web_context)
from trac.wiki import IWikiSyntaxProvider, WikiParser
from trac.wiki.formatter import format_to
class IPropertyDiffRenderer(Interface):
    """Render node properties in TracBrowser and TracChangeset views."""

    def match_property_diff(name):
        """Indicate whether this renderer can treat the given property diffs.

        Returns a quality number, ranging from 0 (unsupported) to 9
        (''perfect'' match).
        """

    def render_property_diff(name, old_context, old_props,
                             new_context, new_props, options):
        """Render the given diff of property to HTML.

        `name` is the property name as given to `match_property_diff()`,
        `old_context` corresponds to the old node being rendered
        (useful when the rendering depends on the node kind)
        and `old_props` is the corresponding collection of all properties.
        Same for `new_context` and `new_props`.

        `options` are the current diff options.

        The rendered result can be one of the following:
        - `None`: the property change will be shown the normal way
          (''changed from `old` to `new`'')
        - an `unicode` value: the change will be shown as textual content
        - `Markup` or other Genshi content: the change will be shown as
          block markup
        """
class DefaultPropertyDiffRenderer(Component):
    """Default version control property difference renderer."""

    implements(IPropertyDiffRenderer)

    def match_property_diff(self, name):
        # Lowest quality: acts as the fallback for every property.
        return 1

    def render_property_diff(self, name, old_context, old_props,
                             new_context, new_props, options):
        old, new = old_props[name], new_props[name]
        # Render as diff only if multiline (see #3002)
        if '\n' not in old and '\n' not in new:
            return None
        context_lines = options.get('contextlines', 3)
        diff_body = '\n'.join(unified_diff(old.splitlines(),
                                           new.splitlines(), context_lines))
        unidiff = '--- \n+++ \n' + diff_body
        html_diff = Mimeview(self.env).render(old_context, 'text/x-diff',
                                              unidiff)
        return tag.li('Property ', tag.strong(name), html_diff)
class ChangesetModule(Component):
    """Renderer providing flexible functionality for showing sets of
    differences.

    If the differences shown are coming from a specific changeset,
    then that changeset information can be shown too.

    In addition, it is possible to show only a subset of the changeset:
    Only the changes affecting a given path will be shown. This is called
    the ''restricted'' changeset.

    But the differences can also be computed in a more general way,
    between two arbitrary paths and/or between two arbitrary revisions.
    In that case, there's no changeset information displayed.
    """

    implements(INavigationContributor, IPermissionRequestor, IRequestHandler,
               ITimelineEventProvider, IWikiSyntaxProvider, ISearchSource)

    # Pluggable renderers for version control property diffs.
    property_diff_renderers = ExtensionPoint(IPropertyDiffRenderer)

    # -- [timeline] configuration options
    timeline_show_files = Option('timeline', 'changeset_show_files', '0',
        """Number of files to show (`-1` for unlimited, `0` to disable).
        This can also be `location`, for showing the common prefix for the
        changed files. (since 0.11).
        """)

    timeline_long_messages = BoolOption('timeline', 'changeset_long_messages',
                                        'false',
        """Whether wiki-formatted changeset messages should be multiline or
        not.
        If this option is not specified or is false and `wiki_format_messages`
        is set to true, changeset messages will be single line only, losing
        some formatting (bullet points, etc).""")

    timeline_collapse = BoolOption('timeline', 'changeset_collapse_events',
                                   'false',
        """Whether consecutive changesets from the same author having
        exactly the same message should be presented as one event.
        That event will link to the range of changesets in the log view.
        (''since 0.11'')""")

    # -- [changeset] configuration options
    max_diff_files = IntOption('changeset', 'max_diff_files', 0,
        """Maximum number of modified files for which the changeset view will
        attempt to show the diffs inlined (''since 0.10'').""")

    max_diff_bytes = IntOption('changeset', 'max_diff_bytes', 10000000,
        """Maximum total size in bytes of the modified files (their old size
        plus their new size) for which the changeset view will attempt to show
        the diffs inlined (''since 0.10'').""")

    wiki_format_messages = BoolOption('changeset', 'wiki_format_messages',
                                      'true',
        """Whether wiki formatting should be applied to changeset messages.
        If this option is disabled, changeset messages will be rendered as
        pre-formatted text.""")
# INavigationContributor methods
    def get_active_navigation_item(self, req):
        # INavigationContributor: highlight the 'browser' entry.
        return 'browser'
    def get_navigation_items(self, req):
        # INavigationContributor: no navigation entries of its own.
        return []
# IPermissionRequestor methods
    def get_permission_actions(self):
        # IPermissionRequestor: declares the CHANGESET_VIEW action.
        return ['CHANGESET_VIEW']
# IRequestHandler methods
_request_re = re.compile(r"/changeset(?:/([^/]+)(/.*)?)?$")
def match_request(self, req):
match = re.match(self._request_re, req.path_info)
if match:
new, new_path = match.groups()
if new:
req.args['new'] = new
if new_path:
req.args['new_path'] = new_path
return True
    def process_request(self, req):
        """The appropriate mode of operation is inferred from the request
        parameters:

         * If `new_path` and `old_path` are equal (or `old_path` is omitted)
           and `new` and `old` are equal (or `old` is omitted),
           then we're about to view a revision Changeset: `chgset` is True.
           Furthermore, if the path is not the root, the changeset is
           ''restricted'' to that path (only the changes affecting that path,
           its children or its ancestor directories will be shown).
         * In any other case, the set of changes corresponds to arbitrary
           differences between path@rev pairs. If `new_path` and `old_path`
           are equal, the ''restricted'' flag will also be set, meaning in
           this case that the differences between two revisions are
           restricted to those occurring on that path.

        In any case, either path@rev pairs must exist.
        """
        req.perm.require('CHANGESET_VIEW')

        # -- retrieve arguments
        full_new_path = new_path = req.args.get('new_path')
        new = req.args.get('new')
        full_old_path = old_path = req.args.get('old_path')
        old = req.args.get('old')
        reponame = req.args.get('reponame')
        # XHR requests come from the blame (annotate) support; they get a
        # stripped-down rendering (see _render_html).
        xhr = req.get_header('X-Requested-With') == 'XMLHttpRequest'

        # -- support for the revision log ''View changes'' form,
        #    where we need to give the path and revision at the same time
        if old and '@' in old:
            old, old_path = old.split('@', 1)
        if new and '@' in new:
            new, new_path = new.split('@', 1)

        # -- resolve the repository from the explicit name, or from the path
        rm = RepositoryManager(self.env)
        if reponame:
            repos = rm.get_repository(reponame)
        else:
            reponame, repos, new_path = rm.get_repository_by_path(new_path)
            if old_path:
                old_reponame, old_repos, old_path = \
                    rm.get_repository_by_path(old_path)
                # cross-repository diffs are not supported
                if old_repos != repos:
                    raise TracError(_("Can't compare across different "
                                      "repositories: %(old)s vs. %(new)s",
                                      old=old_reponame, new=reponame))
        if not repos:
            if reponame or (new_path and new_path != '/'):
                raise TracError(_("Repository '%(repo)s' not found",
                                  repo=reponame or new_path.strip('/')))
            else:
                raise TracError(_("No repository specified and no default "
                                  "repository configured."))

        # -- normalize and check for special case
        try:
            new_path = repos.normalize_path(new_path)
            new = repos.normalize_rev(new)
            full_new_path = '/' + pathjoin(repos.reponame, new_path)
            old_path = repos.normalize_path(old_path or new_path)
            old = repos.normalize_rev(old or new)
            full_old_path = '/' + pathjoin(repos.reponame, old_path)
        except NoSuchChangeset, e:
            raise ResourceNotFound(e.message, _('Invalid Changeset Number'))

        if old_path == new_path and old == new: # revert to Changeset
            old_path = old = None

        style, options, diff_data = get_diff_options(req)
        diff_opts = diff_data['options']

        # -- setup the `chgset` and `restricted` flags, see docstring above.
        chgset = not old and not old_path
        if chgset:
            restricted = new_path not in ('', '/') # (subset or not)
        else:
            restricted = old_path == new_path # (same path or not)

        # -- redirect if changing the diff options or alias requested
        if 'update' in req.args or reponame != repos.reponame:
            contextall = diff_opts['contextall'] or None
            reponame = repos.reponame or None
            if chgset:
                if restricted:
                    req.redirect(req.href.changeset(new, reponame, new_path,
                                                    contextall=contextall))
                else:
                    req.redirect(req.href.changeset(new, reponame,
                                                    contextall=contextall))
            else:
                req.redirect(req.href.changeset(new, reponame,
                                                new_path, old=old,
                                                old_path=full_old_path,
                                                contextall=contextall))

        # -- preparing the data
        if chgset:
            prev = repos.get_node(new_path, new).get_previous()
            if prev:
                prev_path, prev_rev = prev[:2]
            else:
                prev_path, prev_rev = new_path, repos.previous_rev(new)
            data = {'old_path': prev_path, 'old_rev': prev_rev,
                    'new_path': new_path, 'new_rev': new}
        else:
            if not new:
                new = repos.youngest_rev
            elif not old:
                old = repos.youngest_rev
            if not old_path:
                old_path = new_path
            data = {'old_path': old_path, 'old_rev': old,
                    'new_path': new_path, 'new_rev': new}
        data.update({'repos': repos, 'reponame': repos.reponame or None,
                     'diff': diff_data,
                     'wiki_format_messages': self.wiki_format_messages})

        if chgset:
            # from here on, `chgset` holds the actual Changeset instance
            chgset = repos.get_changeset(new)
            req.perm(chgset.resource).require('CHANGESET_VIEW')

            # TODO: find a cheaper way to reimplement r2636
            req.check_modified(chgset.date, [
                style, ''.join(options), repos.name,
                diff_opts['contextlines'], diff_opts['contextall'],
                repos.rev_older_than(new, repos.youngest_rev),
                chgset.message, xhr,
                pretty_timedelta(chgset.date, None, 3600)])

        format = req.args.get('format')

        if format in ['diff', 'zip']:
            # choosing an appropriate filename
            rpath = new_path.replace('/','_')
            if chgset:
                if restricted:
                    filename = 'changeset_%s_%s' % (rpath, new)
                else:
                    filename = 'changeset_%s' % new
            else:
                if restricted:
                    filename = 'diff-%s-from-%s-to-%s' \
                               % (rpath, old, new)
                elif old_path == '/': # special case for download (#238)
                    filename = '%s-%s' % (rpath, old)
                else:
                    filename = 'diff-from-%s-%s-to-%s-%s' \
                               % (old_path.replace('/','_'), old, rpath, new)
            # NOTE(review): both renderers appear to terminate the request
            # themselves (presumably by raising RequestDone) -- confirm,
            # since control would otherwise fall through to the HTML
            # rendering below.
            if format == 'diff':
                self._render_diff(req, filename, repos, data)
            elif format == 'zip':
                self._render_zip(req, filename, repos, data)

        # -- HTML format
        self._render_html(req, repos, chgset, restricted, xhr, data)

        # alternate format links, scripts and stylesheets
        if chgset:
            diff_params = 'new=%s' % new
        else:
            diff_params = unicode_urlencode({
                'new_path': full_new_path, 'new': new,
                'old_path': full_old_path, 'old': old})
        add_link(req, 'alternate', '?format=diff&' + diff_params,
                 _('Unified Diff'), 'text/plain', 'diff')
        add_link(req, 'alternate', '?format=zip&' + diff_params,
                 _('Zip Archive'), 'application/zip', 'zip')

        add_script(req, 'common/js/diff.js')
        add_stylesheet(req, 'common/css/changeset.css')
        add_stylesheet(req, 'common/css/diff.css')
        add_stylesheet(req, 'common/css/code.css')

        # contextual navigation
        if chgset:
            if restricted:
                prevnext_nav(req, _('Previous Change'), _('Next Change'))
            else:
                prevnext_nav(req, _('Previous Changeset'), _('Next Changeset'))
        else:
            rev_href = req.href.changeset(old, full_old_path,
                                          old=new, old_path=full_new_path)
            add_ctxtnav(req, _('Reverse Diff'), href=rev_href)

        return 'changeset.html', data, None
# Internal methods
    def _render_html(self, req, repos, chgset, restricted, xhr, data):
        """HTML version.

        Populates `data` in place and returns it. For XHR requests the
        rendered `#content` fragment is written directly to the response
        and `RequestDone` is raised instead of returning.
        """
        data['restricted'] = restricted
        display_rev = repos.display_rev
        data['display_rev'] = display_rev
        browser = BrowserModule(self.env)
        reponame = repos.reponame or None

        if chgset: # Changeset Mode (possibly restricted on a path)
            path, rev = data['new_path'], data['new_rev']

            # -- getting the change summary from the Changeset.get_changes
            def get_changes():
                for npath, kind, change, opath, orev in chgset.get_changes():
                    old_node = new_node = None
                    if (restricted and
                        not (npath == path or # same path
                             npath.startswith(path + '/') or # npath is below
                             path.startswith(npath + '/'))): # npath is above
                        continue
                    if change != Changeset.ADD:
                        old_node = repos.get_node(opath, orev)
                    if change != Changeset.DELETE:
                        new_node = repos.get_node(npath, rev)
                    else:
                        # support showing paths deleted below a copy target
                        old_node.path = npath
                    yield old_node, new_node, kind, change

            def _changeset_title(rev):
                rev = display_rev(rev)
                if restricted:
                    return _('Changeset %(id)s for %(path)s', id=rev,
                             path=path)
                else:
                    return _('Changeset %(id)s', id=rev)

            data['changeset'] = chgset
            title = _changeset_title(rev)

            # Support for revision properties (#2545)
            context = web_context(req, 'changeset', chgset.rev,
                                  parent=repos.resource)
            data['context'] = context
            revprops = chgset.get_properties()
            data['properties'] = browser.render_properties('revprop', context,
                                                           revprops)

            # -- 'first'/'prev' navigation links
            oldest_rev = repos.oldest_rev
            if chgset.rev != oldest_rev:
                if restricted:
                    prev = repos.get_node(path, rev).get_previous()
                    if prev:
                        prev_path, prev_rev = prev[:2]
                        if prev_rev:
                            prev_href = req.href.changeset(prev_rev, reponame,
                                                           prev_path)
                    else:
                        prev_path = prev_rev = None
                else:
                    add_link(req, 'first',
                             req.href.changeset(oldest_rev, reponame),
                             _('Changeset %(id)s', id=display_rev(oldest_rev)))
                    prev_path = data['old_path']
                    prev_rev = repos.previous_rev(chgset.rev)
                    if prev_rev:
                        prev_href = req.href.changeset(prev_rev, reponame)
                if prev_rev:
                    add_link(req, 'prev', prev_href,
                             _changeset_title(prev_rev))

            # -- 'next'/'last' navigation links
            youngest_rev = repos.youngest_rev
            if str(chgset.rev) != str(youngest_rev):
                if restricted:
                    next_rev = repos.next_rev(chgset.rev, path)
                    if next_rev:
                        if repos.has_node(path, next_rev):
                            next_href = req.href.changeset(next_rev, reponame,
                                                           path)
                        else: # must be a 'D'elete or 'R'ename, show full cset
                            next_href = req.href.changeset(next_rev, reponame)
                else:
                    add_link(req, 'last',
                             req.href.changeset(youngest_rev, reponame),
                             _('Changeset %(id)s',
                               id=display_rev(youngest_rev)))
                    next_rev = repos.next_rev(chgset.rev)
                    if next_rev:
                        next_href = req.href.changeset(next_rev, reponame)
                if next_rev:
                    add_link(req, 'next', next_href,
                             _changeset_title(next_rev))
        else: # Diff Mode
            # -- getting the change summary from the Repository.get_changes
            def get_changes():
                for d in repos.get_changes(
                        new_path=data['new_path'], new_rev=data['new_rev'],
                        old_path=data['old_path'], old_rev=data['old_rev']):
                    yield d
            title = self.title_for_diff(data)
            data['changeset'] = False
        data['title'] = title

        if 'BROWSER_VIEW' not in req.perm:
            return

        # Template-friendly description of one node (path, revs, link).
        def node_info(node, annotated):
            href = req.href.browser(
                reponame, node.created_path, rev=node.created_rev,
                annotate='blame' if annotated else None)
            title = _('Show revision %(rev)s of this file in browser',
                      rev=display_rev(node.rev))
            return {'path': node.path, 'rev': node.rev,
                    'shortrev': repos.short_rev(node.rev),
                    'href': href, 'title': title}
        # Reminder: node.path may not exist at node.rev
        #           as long as node.rev==node.created_rev
        # ... and data['old_rev'] may have nothing to do
        # with _that_ node specific history...

        options = data['diff']['options']

        # Compute the property changes between two nodes, as a list of
        # {'name', 'old', 'new', 'diff'} dicts for the template.
        def _prop_changes(old_node, new_node):
            old_props = old_node.get_properties()
            new_props = new_node.get_properties()
            old_ctx = web_context(req, old_node.resource)
            new_ctx = web_context(req, new_node.resource)
            changed_properties = []
            if old_props != new_props:
                for k, v in sorted(old_props.items()):
                    new = old = diff = None
                    if not k in new_props:
                        old = v # won't be displayed, no need to render it
                    elif v != new_props[k]:
                        diff = self.render_property_diff(
                            k, old_ctx, old_props, new_ctx, new_props, options)
                        if not diff:
                            old = browser.render_property(k, 'changeset',
                                                          old_ctx, old_props)
                            new = browser.render_property(k, 'changeset',
                                                          new_ctx, new_props)
                    if new or old or diff:
                        changed_properties.append({'name': k, 'old': old,
                                                   'new': new, 'diff': diff})
                for k, v in sorted(new_props.items()):
                    if not k in old_props:
                        new = browser.render_property(k, 'changeset',
                                                      new_ctx, new_props)
                        if new is not None:
                            changed_properties.append({'name': k, 'new': new,
                                                       'old': None})
            return changed_properties

        # Rough diff size estimate: sum of both content lengths.
        def _estimate_changes(old_node, new_node):
            old_size = old_node.get_content_length()
            new_size = new_node.get_content_length()
            return old_size + new_size

        def _content_changes(old_node, new_node):
            """Returns the list of differences.

            The list is empty when no differences between comparable files
            are detected, but the return value is None for non-comparable
            files.
            """
            mview = Mimeview(self.env)
            if mview.is_binary(old_node.content_type, old_node.path):
                return None
            if mview.is_binary(new_node.content_type, new_node.path):
                return None
            old_content = old_node.get_content().read()
            if mview.is_binary(content=old_content):
                return None
            new_content = new_node.get_content().read()
            if mview.is_binary(content=new_content):
                return None

            old_content = mview.to_unicode(old_content, old_node.content_type)
            new_content = mview.to_unicode(new_content, new_node.content_type)

            if old_content != new_content:
                context = options.get('contextlines', 3)
                if context < 0 or options.get('contextall'):
                    context = None
                tabwidth = self.config['diff'].getint('tab_width') or \
                           self.config['mimeviewer'].getint('tab_width', 8)
                ignore_blank_lines = options.get('ignoreblanklines')
                ignore_case = options.get('ignorecase')
                ignore_space = options.get('ignorewhitespace')
                return diff_blocks(old_content.splitlines(),
                                   new_content.splitlines(),
                                   context, tabwidth,
                                   ignore_blank_lines=ignore_blank_lines,
                                   ignore_case=ignore_case,
                                   ignore_space_changes=ignore_space)
            else:
                return []

        # -- first pass: estimate whether the diffs are small enough to
        #    be shown inlined, according to the configured limits
        diff_bytes = diff_files = 0
        if self.max_diff_bytes or self.max_diff_files:
            for old_node, new_node, kind, change in get_changes():
                if change in Changeset.DIFF_CHANGES and kind == Node.FILE \
                        and old_node.is_viewable(req.perm) \
                        and new_node.is_viewable(req.perm):
                    diff_files += 1
                    diff_bytes += _estimate_changes(old_node, new_node)
        show_diffs = (not self.max_diff_files or \
                      0 < diff_files <= self.max_diff_files) and \
                     (not self.max_diff_bytes or \
                      diff_bytes <= self.max_diff_bytes or \
                      diff_files == 1)

        # XHR is used for blame support: display the changeset view without
        # the navigation and with the changes concerning the annotated file
        annotated = False
        if xhr:
            show_diffs = False
            annotated = repos.normalize_path(req.args.get('annotate'))

        # -- second pass: build the per-change template data
        has_diffs = False
        filestats = self._prepare_filestats()
        changes = []
        files = []
        for old_node, new_node, kind, change in get_changes():
            props = []
            diffs = []
            show_old = old_node and old_node.is_viewable(req.perm)
            show_new = new_node and new_node.is_viewable(req.perm)
            show_entry = change != Changeset.EDIT
            show_diff = show_diffs or (new_node and new_node.path == annotated)
            if change in Changeset.DIFF_CHANGES and show_old and show_new:
                assert old_node and new_node
                props = _prop_changes(old_node, new_node)
                if props:
                    show_entry = True
                if kind == Node.FILE and show_diff:
                    diffs = _content_changes(old_node, new_node)
                    if diffs != []:
                        if diffs:
                            has_diffs = True
                        # elif None (means: manually compare to (previous))
                        show_entry = True
            if (show_old or show_new) and (show_entry or not show_diff):
                info = {'change': change,
                        'old': old_node and node_info(old_node, annotated),
                        'new': new_node and node_info(new_node, annotated),
                        'props': props,
                        'diffs': diffs}
                files.append(new_node.path if new_node else \
                             old_node.path if old_node else '')
                filestats[change] += 1
                if change in Changeset.DIFF_CHANGES:
                    if chgset:
                        href = req.href.changeset(new_node.rev, reponame,
                                                  new_node.path)
                        title = _('Show the changeset %(id)s restricted to '
                                  '%(path)s', id=display_rev(new_node.rev),
                                  path=new_node.path)
                    else:
                        href = req.href.changeset(
                            new_node.created_rev, reponame,
                            new_node.created_path,
                            old=old_node.created_rev,
                            old_path=pathjoin(repos.reponame,
                                              old_node.created_path))
                        title = _('Show the %(range)s differences restricted '
                                  'to %(path)s', range='[%s:%s]' % (
                                      display_rev(old_node.rev),
                                      display_rev(new_node.rev)),
                                  path=new_node.path)
                    info['href'] = href
                    info['title'] = old_node and title
                if change in Changeset.DIFF_CHANGES and not show_diff:
                    info['hide_diff'] = True
            else:
                info = None
            changes.append(info) # the sequence should be immutable

        data.update({'has_diffs': has_diffs, 'changes': changes, 'xhr': xhr,
                     'filestats': filestats, 'annotated': annotated,
                     'files': files,
                     'location': self._get_parent_location(files),
                     'longcol': 'Revision', 'shortcol': 'r'})

        if xhr: # render and return the content only
            stream = Chrome(self.env).render_template(req, 'changeset.html',
                                                      data, fragment=True)
            content = stream.select('//div[@id="content"]')
            str_content = content.render('xhtml', encoding='utf-8')
            req.send_header('Content-Length', len(str_content))
            req.end_headers()
            req.write(str_content)
            raise RequestDone

        return data
def _render_diff(self, req, filename, repos, data):
"""Raw Unified Diff version"""
req.send_response(200)
req.send_header('Content-Type', 'text/x-patch;charset=utf-8')
req.send_header('Content-Disposition',
content_disposition('attachment', filename + '.diff'))
buf = StringIO()
mimeview = Mimeview(self.env)
for old_node, new_node, kind, change in repos.get_changes(
new_path=data['new_path'], new_rev=data['new_rev'],
old_path=data['old_path'], old_rev=data['old_rev']):
# TODO: Property changes
# Content changes
if kind == Node.DIRECTORY:
continue
new_content = old_content = ''
new_node_info = old_node_info = ('','')
if old_node:
if not old_node.is_viewable(req.perm):
continue
if mimeview.is_binary(old_node.content_type, old_node.path):
continue
old_content = old_node.get_content().read()
if mimeview.is_binary(content=old_content):
continue
old_node_info = (old_node.path, old_node.rev)
old_content = mimeview.to_unicode(old_content,
old_node.content_type)
if new_node:
if not new_node.is_viewable(req.perm):
continue
if mimeview.is_binary(new_node.content_type, new_node.path):
continue
new_content = new_node.get_content().read()
if mimeview.is_binary(content=new_content):
continue
new_node_info = (new_node.path, new_node.rev)
new_path = new_node.path
new_content = mimeview.to_unicode(new_content,
new_node.content_type)
else:
old_node_path = repos.normalize_path(old_node.path)
diff_old_path = repos.normalize_path(data['old_path'])
new_path = pathjoin(data['new_path'],
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | true |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/versioncontrol/web_ui/tests/wikisyntax.py | trac/trac/versioncontrol/web_ui/tests/wikisyntax.py | # -*- encoding: utf-8 -*-
import unittest
from trac.test import Mock
from trac.versioncontrol import NoSuchChangeset, NoSuchNode
from trac.versioncontrol.api import *
from trac.versioncontrol.web_ui import *
from trac.wiki.tests import formatter
def _get_changeset(rev):
    """Fake changeset lookup: only revision '1' exists."""
    if rev != '1':
        raise NoSuchChangeset(rev)
    return Mock(message="start", is_viewable=lambda perm: True)
def _normalize_rev(rev):
try:
return int(rev)
except ValueError:
if rev == 'head':
return '200'
else:
raise NoSuchChangeset(rev)
def _get_node(path, rev=None):
    """Fake node lookup: 'foo' is a directory, 'missing/file' is absent."""
    if path == 'missing/file':
        raise NoSuchNode(path, rev)
    return Mock(path=path, rev=rev, isfile=(path != 'foo'),
                is_viewable=lambda resource: True)
def _get_repository(reponame):
    """Return a fake repository wired to the fake lookup helpers."""
    return Mock(reponame=reponame,
                youngest_rev='200',
                normalize_rev=_normalize_rev,
                get_changeset=_get_changeset,
                get_node=_get_node)
def repository_setup(tc):
    """Point both the test env and the RepositoryManager at the stub repo."""
    tc.env.get_repository = _get_repository
    RepositoryManager(tc.env).get_repository = _get_repository
# Wiki-formatter fixtures for changeset links.  Each case is a `===` header
# with a title, the wiki input, a `---` separator and the expected XHTML;
# some cases carry an extra expected block after a second separator (used
# by the test driver — see trac.wiki.tests.formatter for the exact format).
CHANGESET_TEST_CASES = u"""
============================== changeset: link resolver
changeset:1
changeset:12
changeset:abc
changeset:1, changeset:1/README.txt
------------------------------
<p>
<a class="changeset" href="/changeset/1" title="start">changeset:1</a>
<a class="missing changeset" title="No changeset 12 in the repository">changeset:12</a>
<a class="missing changeset" title="No changeset abc in the repository">changeset:abc</a>
<a class="changeset" href="/changeset/1" title="start">changeset:1</a>, <a class="changeset" href="/changeset/1/README.txt" title="start">changeset:1/README.txt</a>
</p>
------------------------------
============================== changeset: link resolver + query and fragment
changeset:1?format=diff
changeset:1#file0
------------------------------
<p>
<a class="changeset" href="/changeset/1?format=diff" title="start">changeset:1?format=diff</a>
<a class="changeset" href="/changeset/1#file0" title="start">changeset:1#file0</a>
</p>
------------------------------
============================== changeset shorthand syntax
[1], r1
[12], r12, rABC
[1/README.txt], r1/trunk, rABC/trunk
------------------------------
<p>
<a class="changeset" href="/changeset/1" title="start">[1]</a>, <a class="changeset" href="/changeset/1" title="start">r1</a>
<a class="missing changeset" title="No changeset 12 in the repository">[12]</a>, <a class="missing changeset" title="No changeset 12 in the repository">r12</a>, rABC
<a class="changeset" href="/changeset/1/README.txt" title="start">[1/README.txt]</a>, <a class="changeset" href="/changeset/1/trunk" title="start">r1/trunk</a>, rABC/trunk
</p>
------------------------------
============================== changeset shorthand syntax + query and fragment
[1?format=diff]
[1#file0]
[1/README.txt?format=diff]
[1/README.txt#file0]
------------------------------
<p>
<a class="changeset" href="/changeset/1?format=diff" title="start">[1?format=diff]</a>
<a class="changeset" href="/changeset/1#file0" title="start">[1#file0]</a>
<a class="changeset" href="/changeset/1/README.txt?format=diff" title="start">[1/README.txt?format=diff]</a>
<a class="changeset" href="/changeset/1/README.txt#file0" title="start">[1/README.txt#file0]</a>
</p>
------------------------------
============================== escaping the above
![1], !r1
------------------------------
<p>
[1], r1
</p>
------------------------------
============================== unicode digits
[₁₂₃], r₁₂₃, [₀A₁B₂C₃D]
------------------------------
<p>
[₁₂₃], r₁₂₃, [₀A₁B₂C₃D]
</p>
------------------------------
============================== Link resolver counter examples
Change:[10] There should be a link to changeset [10]
rfc and rfc:4180 should not be changeset links, neither should rfc4180
------------------------------
<p>
Change:<a class="missing changeset" title="No changeset 10 in the repository">[10]</a> There should be a link to changeset <a class="missing changeset" title="No changeset 10 in the repository">[10]</a>
</p>
<p>
rfc and rfc:4180 should not be changeset links, neither should rfc4180
</p>
------------------------------
Change:<a class="missing changeset" title="No changeset 10 in the repository">[10]</a> There should be a link to changeset <a class="missing changeset" title="No changeset 10 in the repository">[10]</a>
rfc and rfc:4180 should not be changeset links, neither should rfc4180
============================== InterTrac for changesets
trac:changeset:2081
[trac:changeset:2081 Trac r2081]
------------------------------
<p>
<a class="ext-link" href="http://trac.edgewall.org/intertrac/changeset%3A2081" title="changeset:2081 in Trac's Trac"><span class="icon"></span>trac:changeset:2081</a>
<a class="ext-link" href="http://trac.edgewall.org/intertrac/changeset%3A2081" title="changeset:2081 in Trac's Trac"><span class="icon"></span>Trac r2081</a>
</p>
------------------------------
============================== Changeset InterTrac shorthands
[T2081]
[trac 2081]
[trac 2081/trunk]
T:r2081
------------------------------
<p>
<a class="ext-link" href="http://trac.edgewall.org/intertrac/changeset%3A2081" title="changeset:2081 in Trac's Trac"><span class="icon"></span>[T2081]</a>
<a class="ext-link" href="http://trac.edgewall.org/intertrac/changeset%3A2081" title="changeset:2081 in Trac's Trac"><span class="icon"></span>[trac 2081]</a>
<a class="ext-link" href="http://trac.edgewall.org/intertrac/changeset%3A2081/trunk" title="changeset:2081/trunk in Trac\'s Trac"><span class="icon"></span>[trac 2081/trunk]</a>
<a class="ext-link" href="http://trac.edgewall.org/intertrac/r2081" title="r2081 in Trac's Trac"><span class="icon"></span>T:r2081</a>
</p>
------------------------------
""" #"
# Fixtures for log-range TracLinks and the log: resolver (same fixture
# format as CHANGESET_TEST_CASES above).
LOG_TEST_CASES = u"""
============================== Log range TracLinks
[1:2], r1:2, [12:23], r12:23
[1:2/trunk], r1:2/trunk
[2:1/trunk] reversed, r2:1/trunk reversed
------------------------------
<p>
<a class="source" href="/log/?revs=1-2">[1:2]</a>, <a class="source" href="/log/?revs=1-2">r1:2</a>, <a class="source" href="/log/?revs=12-23">[12:23]</a>, <a class="source" href="/log/?revs=12-23">r12:23</a>
<a class="source" href="/log/trunk?revs=1-2">[1:2/trunk]</a>, <a class="source" href="/log/trunk?revs=1-2">r1:2/trunk</a>
<a class="source" href="/log/trunk?revs=1-2">[2:1/trunk]</a> reversed, <a class="source" href="/log/trunk?revs=1-2">r2:1/trunk</a> reversed
</p>
------------------------------
============================== Big ranges (#9955 regression)
[1234567890:12345678901]
------------------------------
<p>
<a class="source" href="/log/?revs=1234567890-12345678901">[1234567890:12345678901]</a>
</p>
------------------------------
<a class="source" href="/log/?revs=1234567890-12345678901">[1234567890:12345678901]</a>
============================== Escaping Log range TracLinks
![1:2], !r1:2, ![12:23], !r12:23
------------------------------
<p>
[1:2], r1:2, [12:23], r12:23
</p>
------------------------------
[1:2], r1:2, [12:23], r12:23
============================== log: link resolver
log:@12
log:trunk
log:trunk@12
log:trunk@12:23
log:trunk@12-23
log:trunk:12:23
log:trunk:12-23
log:trunk:12-head
log:trunk:12@23 (bad, but shouldn't error out)
------------------------------
<p>
<a class="source" href="/log/?rev=12">log:@12</a>
<a class="source" href="/log/trunk">log:trunk</a>
<a class="source" href="/log/trunk?rev=12">log:trunk@12</a>
<a class="source" href="/log/trunk?revs=12-23">log:trunk@12:23</a>
<a class="source" href="/log/trunk?revs=12-23">log:trunk@12-23</a>
<a class="source" href="/log/trunk?revs=12-23">log:trunk:12:23</a>
<a class="source" href="/log/trunk?revs=12-23">log:trunk:12-23</a>
<a class="source" href="/log/trunk?revs=12-200">log:trunk:12-head</a>
<a class="source" href="/log/trunk">log:trunk:12@23</a> (bad, but shouldn't error out)
</p>
------------------------------
============================== log: link resolver + query
log:?limit=10
log:@12?limit=10
log:trunk?limit=10
log:trunk@12?limit=10
[10:20?verbose=yes&format=changelog]
[10:20/trunk?verbose=yes&format=changelog]
------------------------------
<p>
<a class="source" href="/log/?limit=10">log:?limit=10</a>
<a class="source" href="/log/?rev=12&limit=10">log:@12?limit=10</a>
<a class="source" href="/log/trunk?limit=10">log:trunk?limit=10</a>
<a class="source" href="/log/trunk?rev=12&limit=10">log:trunk@12?limit=10</a>
<a class="source" href="/log/?revs=10-20&verbose=yes&format=changelog">[10:20?verbose=yes&format=changelog]</a>
<a class="source" href="/log/trunk?revs=10-20&verbose=yes&format=changelog">[10:20/trunk?verbose=yes&format=changelog]</a>
</p>
------------------------------
============================== Multiple Log ranges
r12:20,25,35:56,68,69,100-120
[12:20,25,35:56,68,69,100-120]
[12:20,25,88:head,68,69] (not supported)
------------------------------
<p>
<a class="source" href="/log/?revs=12-20%2C25%2C35-56%2C68-69%2C100-120">r12:20,25,35:56,68,69,100-120</a>
<a class="source" href="/log/?revs=12-20%2C25%2C35-56%2C68-69%2C100-120">[12:20,25,35:56,68,69,100-120]</a>
[12:20,25,88:head,68,69] (not supported)
</p>
------------------------------
============================== Link resolver counter examples
rfc:4180 should not be a log link
------------------------------
<p>
rfc:4180 should not be a log link
</p>
------------------------------
============================== Log range InterTrac shorthands
[T3317:3318]
[trac 3317:3318]
[trac 3317:3318/trunk]
------------------------------
<p>
<a class="ext-link" href="http://trac.edgewall.org/intertrac/log%3A/%403317%3A3318" title="log:/@3317:3318 in Trac\'s Trac"><span class="icon"></span>[T3317:3318]</a>
<a class="ext-link" href="http://trac.edgewall.org/intertrac/log%3A/%403317%3A3318" title="log:/@3317:3318 in Trac\'s Trac"><span class="icon"></span>[trac 3317:3318]</a>
<a class="ext-link" href="http://trac.edgewall.org/intertrac/log%3A/trunk%403317%3A3318" title="log:/trunk@3317:3318 in Trac\'s Trac"><span class="icon"></span>[trac 3317:3318/trunk]</a>
</p>
------------------------------
============================== Log range with unicode digits
r₁₂:₂₀,₂₅,₃₀-₃₅
[₁₂:₂₀,₂₅,₃₀-₃₅]
[T₃₃₁₇:₃₃₁₈]
[trac ₃₃₁₇:₃₃₁₈]
------------------------------
<p>
r₁₂:₂₀,₂₅,₃₀-₃₅
[₁₂:₂₀,₂₅,₃₀-₃₅]
[T₃₃₁₇:₃₃₁₈]
[trac ₃₃₁₇:₃₃₁₈]
</p>
------------------------------
"""
# Fixtures for the diff: link resolver (same fixture format as above).
DIFF_TEST_CASES = u"""
============================== diff: link resolver
diff:trunk//branch
diff:trunk@12//branch@23
diff:trunk@12:23
diff:@12:23
------------------------------
<p>
<a class="changeset" href="/changeset?new_path=branch&old_path=trunk" title="Diff from trunk@latest to branch@latest">diff:trunk//branch</a>
<a class="changeset" href="/changeset?new=23&new_path=branch&old=12&old_path=trunk" title="Diff from trunk@12 to branch@23">diff:trunk@12//branch@23</a>
<a class="changeset" href="/changeset?new=23&new_path=trunk&old=12&old_path=trunk" title="Diff [12:23] for trunk">diff:trunk@12:23</a>
<a class="changeset" href="/changeset?new=23&old=12" title="Diff [12:23] for /">diff:@12:23</a>
</p>
------------------------------
============================== diff: link resolver + query
diff:trunk//branch?format=diff
------------------------------
<p>
<a class="changeset" href="/changeset?new_path=branch&old_path=trunk&format=diff" title="Diff from trunk@latest to branch@latest">diff:trunk//branch?format=diff</a>
</p>
------------------------------
============================== diff: link, empty diff
diff://
------------------------------
<p>
<a class="changeset" title="Diff [latest:latest] for /">diff://</a>
</p>
------------------------------
"""
# Fixtures for the source: and export: link resolvers (same fixture
# format as above).
SOURCE_TEST_CASES = u"""
============================== source: link resolver
source:/foo/bar
source:/foo/bar#42 # no long works as rev spec
source:/foo/bar#head #
source:/foo/bar@42
source:/foo/bar@head
source:/foo%20bar/baz%2Bquux
source:@42
source:/foo/bar@42#L20
source:/foo/bar@head#L20
source:/foo/bar@#L20
source:/missing/file
------------------------------
<p>
<a class="source" href="/browser/foo/bar">source:/foo/bar</a><a class="trac-rawlink" href="/export/HEAD/foo/bar" title="Download"></a>
<a class="source" href="/browser/foo/bar#42">source:/foo/bar#42</a><a class="trac-rawlink" href="/export/HEAD/foo/bar#42" title="Download"></a> # no long works as rev spec
<a class="source" href="/browser/foo/bar#head">source:/foo/bar#head</a><a class="trac-rawlink" href="/export/HEAD/foo/bar#head" title="Download"></a> #
<a class="source" href="/browser/foo/bar?rev=42">source:/foo/bar@42</a><a class="trac-rawlink" href="/export/42/foo/bar" title="Download"></a>
<a class="source" href="/browser/foo/bar?rev=head">source:/foo/bar@head</a><a class="trac-rawlink" href="/export/head/foo/bar" title="Download"></a>
<a class="source" href="/browser/foo%2520bar/baz%252Bquux">source:/foo%20bar/baz%2Bquux</a><a class="trac-rawlink" href="/export/HEAD/foo%2520bar/baz%252Bquux" title="Download"></a>
<a class="source" href="/browser/?rev=42">source:@42</a><a class="trac-rawlink" href="/export/42/" title="Download"></a>
<a class="source" href="/browser/foo/bar?rev=42#L20">source:/foo/bar@42#L20</a><a class="trac-rawlink" href="/export/42/foo/bar#L20" title="Download"></a>
<a class="source" href="/browser/foo/bar?rev=head#L20">source:/foo/bar@head#L20</a><a class="trac-rawlink" href="/export/head/foo/bar#L20" title="Download"></a>
<a class="source" href="/browser/foo/bar#L20">source:/foo/bar@#L20</a><a class="trac-rawlink" href="/export/HEAD/foo/bar#L20" title="Download"></a>
<a class="missing source">source:/missing/file</a>
</p>
------------------------------
============================== source: link resolver + query
source:/foo?order=size&desc=1
source:/foo/bar?format=raw
------------------------------
<p>
<a class="source" href="/browser/foo?order=size&desc=1">source:/foo?order=size&desc=1</a>
<a class="source" href="/browser/foo/bar?format=raw">source:/foo/bar?format=raw</a><a class="trac-rawlink" href="/export/HEAD/foo/bar" title="Download"></a>
</p>
------------------------------
============================== source: provider, with quoting
source:'even with whitespaces'
source:"even with whitespaces"
[source:'even with whitespaces' Path with spaces]
[source:"even with whitespaces" Path with spaces]
------------------------------
<p>
<a class="source" href="/browser/even%20with%20whitespaces">source:'even with whitespaces'</a><a class="trac-rawlink" href="/export/HEAD/even%20with%20whitespaces" title="Download"></a>
<a class="source" href="/browser/even%20with%20whitespaces">source:"even with whitespaces"</a><a class="trac-rawlink" href="/export/HEAD/even%20with%20whitespaces" title="Download"></a>
<a class="source" href="/browser/even%20with%20whitespaces">Path with spaces</a><a class="trac-rawlink" href="/export/HEAD/even%20with%20whitespaces" title="Download"></a>
<a class="source" href="/browser/even%20with%20whitespaces">Path with spaces</a><a class="trac-rawlink" href="/export/HEAD/even%20with%20whitespaces" title="Download"></a>
</p>
------------------------------
============================== export: link resolver
export:/foo/bar.html
export:123:/foo/pict.gif
export:/foo/pict.gif@123
------------------------------
<p>
<a class="export" href="/export/HEAD/foo/bar.html" title="Download">export:/foo/bar.html</a>
<a class="export" href="/export/123/foo/pict.gif" title="Download">export:123:/foo/pict.gif</a>
<a class="export" href="/export/123/foo/pict.gif" title="Download">export:/foo/pict.gif@123</a>
</p>
------------------------------
============================== export: link resolver + fragment
export:/foo/bar.html#header
------------------------------
<p>
<a class="export" href="/export/HEAD/foo/bar.html#header" title="Download">export:/foo/bar.html#header</a>
</p>
------------------------------
""" # " (be Emacs friendly...)
def suite():
    """Aggregate the formatter test suites for every fixture group above."""
    all_tests = unittest.TestSuite()
    all_tests.addTest(formatter.suite(CHANGESET_TEST_CASES, repository_setup,
                                      __file__))
    all_tests.addTest(formatter.suite(LOG_TEST_CASES, repository_setup,
                                      file=__file__))
    all_tests.addTest(formatter.suite(DIFF_TEST_CASES, file=__file__))
    all_tests.addTest(formatter.suite(SOURCE_TEST_CASES, repository_setup,
                                      file=__file__))
    return all_tests
# Allow running this test module standalone: `python wikisyntax.py`.
if __name__ == '__main__':
    unittest.main(defaultTest='suite')
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/versioncontrol/web_ui/tests/__init__.py | trac/trac/versioncontrol/web_ui/tests/__init__.py | import unittest
from trac.versioncontrol.web_ui.tests import wikisyntax
def suite():
    """Collect the version-control web UI test suites for this package."""
    tests = unittest.TestSuite()
    tests.addTest(wikisyntax.suite())
    return tests
# Allow running this package's tests standalone.
if __name__ == '__main__':
    unittest.main(defaultTest='suite')
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/admin/api.py | trac/trac/admin/api.py | # -*- coding: utf-8 -*-
#
# Copyright (C)2006-2009 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://trac.edgewall.org/wiki/TracLicense.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://trac.edgewall.org/log/.
import os.path
import sys
import traceback
from trac.core import *
from trac.util.text import levenshtein_distance
from trac.util.translation import _
# strftime-style date/time formats used by trac-admin console commands,
# plus the human-readable hint shown in command usage messages.
console_date_format = '%Y-%m-%d'
console_datetime_format = '%Y-%m-%d %H:%M:%S'
console_date_format_hint = 'YYYY-MM-DD'
class IAdminPanelProvider(Interface):
    """Extension point interface for adding panels to the web-based
    administration interface.
    """
    def get_admin_panels(req):
        """Return a list of available admin panels.
        The items returned by this function must be tuples of the form
        `(category, category_label, page, page_label)`.  Implementations
        typically consult `req.perm` to decide which panels to expose.
        """
    def render_admin_panel(req, category, page, path_info):
        """Process a request for an admin panel.
        This function should return a tuple of the form `(template, data)`,
        where `template` is the name of the template to use and `data` is the
        data to be passed to the template.
        """
class AdminCommandError(TracError):
    """Exception raised when an admin command cannot be executed.

    `show_usage` asks the console to print the command's usage text, and
    `cmd` carries the full command name (filled in by the command manager
    when known).
    """
    def __init__(self, msg, show_usage=False, cmd=None):
        TracError.__init__(self, msg)
        self.cmd = cmd
        self.show_usage = show_usage
class IAdminCommandProvider(Interface):
    """Extension point interface for adding commands to the console
    administration interface `trac-admin`.
    """
    def get_admin_commands():
        """Return a list of available admin commands.
        The items returned by this function must be tuples of the form
        `(command, args, help, complete, execute)`, where `command` contains
        the space-separated command and sub-command names, `args` is a string
        describing the command arguments and `help` is the help text. The
        first paragraph of the help text is taken as a short help, shown in the
        list of commands.
        `complete` is called to auto-complete the command arguments, with the
        current list of arguments as its only argument. It should return a list
        of relevant values for the last argument in the list.
        `execute` is called to execute the command, with the command arguments
        passed as positional arguments.
        """
class AdminCommandManager(Component):
"""trac-admin command manager."""
providers = ExtensionPoint(IAdminCommandProvider)
def get_command_help(self, args=[]):
"""Return help information for a set of commands."""
commands = []
for provider in self.providers:
for cmd in provider.get_admin_commands() or []:
parts = cmd[0].split()
if parts[:len(args)] == args:
commands.append(cmd[:3])
commands.sort()
return commands
def complete_command(self, args, cmd_only=False):
"""Perform auto-completion on the given arguments."""
comp = []
for provider in self.providers:
for cmd in provider.get_admin_commands() or []:
parts = cmd[0].split()
plen = min(len(parts), len(args) - 1)
if args[:plen] != parts[:plen]: # Prefix doesn't match
continue
elif len(args) <= len(parts): # Command name
comp.append(parts[len(args) - 1])
elif not cmd_only: # Arguments
if cmd[3] is None:
return []
return cmd[3](args[len(parts):]) or []
return comp
def execute_command(self, *args):
"""Execute a command given by a list of arguments."""
args = list(args)
for provider in self.providers:
for cmd in provider.get_admin_commands() or []:
parts = cmd[0].split()
if args[:len(parts)] == parts:
f = cmd[4]
fargs = args[len(parts):]
try:
return f(*fargs)
except AdminCommandError, e:
e.cmd = ' '.join(parts)
raise
except TypeError, e:
tb = traceback.extract_tb(sys.exc_info()[2])
if len(tb) == 1:
raise AdminCommandError(_("Invalid arguments"),
show_usage=True,
cmd=' '.join(parts))
raise
raise AdminCommandError(_("Command not found"), show_usage=True)
def get_similar_commands(self, arg, n=5):
if not arg:
return []
cmds = set()
for provider in self.providers:
for cmd in provider.get_admin_commands() or []:
cmds.add(cmd[0].split()[0]) # use only first token
def score(cmd, arg):
if cmd.startswith(arg):
return 0
return levenshtein_distance(cmd, arg) / float(len(cmd) + len(arg))
similars = sorted((score(cmd, arg), cmd) for cmd in cmds)
similars = [cmd for val, cmd in similars if val <= 0.5]
return similars[:n]
class PrefixList(list):
    """A list of prefixes for command argument auto-completion."""
    def complete(self, text):
        """Return the unique entries that start with `text`."""
        return list({entry for entry in self if entry.startswith(text)})
def path_startswith(path, prefix):
    """Case-normalized test of whether `path` starts with `prefix`
    (case-insensitive on platforms with case-insensitive filesystems)."""
    normalized = os.path.normcase(path)
    return normalized.startswith(os.path.normcase(prefix))
class PathList(list):
    """A list of paths for command argument auto-completion."""
    def complete(self, text):
        """Return the items in the list matching text."""
        matches = list({p for p in self if path_startswith(p, text)})
        # A single match that is not a directory finishes the argument, so
        # append a space to move the shell's completion past it.
        if len(matches) == 1 and not os.path.isdir(matches[0]):
            matches[0] += ' '
        return matches
def get_dir_list(path, dirs_only=False):
    """Return a list of paths to filesystem entries in the same directory
    as the given path.

    Directories are returned with a trailing separator; plain files are
    skipped when `dirs_only` is set.  Unreadable directories yield an
    empty list, and entries that cannot be stat'ed are silently ignored.
    """
    dname = os.path.dirname(path)
    result = PathList()
    try:
        entries = os.listdir(os.path.join(os.getcwd(), dname))
    except OSError:
        return result
    for entry in entries:
        candidate = os.path.normpath(os.path.join(dname, entry))
        try:
            if os.path.isdir(candidate):
                result.append(os.path.join(candidate, ''))
            elif not dirs_only:
                result.append(candidate)
        except OSError:
            pass
    return result
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/admin/web_ui.py | trac/trac/admin/web_ui.py | # -*- coding: utf-8 -*-
#
# Copyright (C) 2005-2009 Edgewall Software
# Copyright (C) 2005 Jonas Borgström <jonas@edgewall.com>
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://trac.edgewall.com/license.html.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://trac.edgewall.org/.
#
# Author: Jonas Borgström <jonas@edgewall.com>
from __future__ import with_statement
from functools import partial
import os
import pkg_resources
import re
import shutil
try:
from babel.core import Locale
except ImportError:
Locale = None
from genshi import HTML
from genshi.builder import tag
from trac.admin.api import IAdminPanelProvider
from trac.core import *
from trac.loader import get_plugin_info, get_plugins_dir
from trac.perm import IPermissionRequestor, PermissionError, PermissionSystem
from trac.util.datefmt import all_timezones
from trac.util.text import exception_to_unicode, \
unicode_to_base64, unicode_from_base64
from trac.util.translation import _, get_available_locales, ngettext
from trac.web import HTTPNotFound, IRequestHandler
from trac.web.chrome import add_notice, add_stylesheet, \
add_warning, Chrome, INavigationContributor, \
ITemplateProvider
from trac.wiki.formatter import format_to_html
# Optional integration with the legacy WebAdmin plugin.
try:
    from webadmin import IAdminPageProvider
except ImportError:
    IAdminPageProvider = None
# Optional integration with the AccountManager plugin; the admin panel
# class is only needed so it can be hidden in child environments.
try:
    from acct_mgr.admin import AccountManagerAdminPanel
except Exception:
    # Was a bare `except:`, which also swallowed SystemExit and
    # KeyboardInterrupt; `except Exception:` keeps the best-effort
    # tolerance for any broken plugin import without hiding those.
    AccountManagerAdminPanel = None
class AdminModule(Component):
    """Web administration interface provider and panel manager.

    Collects panels from `IAdminPanelProvider` implementations (and from
    legacy WebAdmin `IAdminPageProvider` plugins when that package is
    importable), exposes the /admin URL space and dispatches requests to
    the matching panel.
    """
    implements(INavigationContributor, IRequestHandler, ITemplateProvider)
    panel_providers = ExtensionPoint(IAdminPanelProvider)
    # Legacy WebAdmin support: the extension point only exists when the
    # old `webadmin` package could be imported (see module top).
    if IAdminPageProvider:
        old_providers = ExtensionPoint(IAdminPageProvider)
    else:
        old_providers = None
    # INavigationContributor methods
    def get_active_navigation_item(self, req):
        # Keep the 'Admin' entry highlighted for every /admin page.
        return 'admin'
    def get_navigation_items(self, req):
        # The 'Admin' navigation item is only visible if at least one
        # admin panel is available
        panels, providers = self._get_panels(req)
        if panels:
            yield 'mainnav', 'admin', tag.a(_('Admin'), href=req.href.admin(),
                                            title=_('Administration'))
    # IRequestHandler methods
    def match_request(self, req):
        # Matches /admin, /admin/<cat>, /admin/<cat>/<panel> and
        # /admin/<cat>/<panel>/<path_info>; absent groups come out as None.
        match = re.match('/admin(?:/([^/]+)(?:/([^/]+)(?:/(.+))?)?)?$',
                         req.path_info)
        if match:
            req.args['cat_id'] = match.group(1)
            req.args['panel_id'] = match.group(2)
            req.args['path_info'] = match.group(3)
            return True
    def process_request(self, req):
        """Dispatch the request to the selected panel, defaulting to the
        first panel of the first category when none is given."""
        panels, providers = self._get_panels(req)
        if not panels:
            raise HTTPNotFound(_('No administration panels available'))
        # Python 2 cmp-style comparator: ('general', ..., 'basics', ...)
        # sorts first, then the remaining 'general' panels, then everything
        # else alphabetically.  p[::2] is the (category id, page id) pair.
        def _panel_order(p1, p2):
            if p1[::2] == ('general', 'basics'):
                return -1
            elif p2[::2] == ('general', 'basics'):
                return 1
            elif p1[0] == 'general':
                if p2[0] == 'general':
                    return cmp(p1[1:], p2[1:])
                return -1
            elif p2[0] == 'general':
                if p1[0] == 'general':
                    return cmp(p1[1:], p2[1:])
                return 1
            return cmp(p1, p2)
        panels.sort(_panel_order)
        cat_id = req.args.get('cat_id') or panels[0][0]
        panel_id = req.args.get('panel_id')
        path_info = req.args.get('path_info')
        if not panel_id:
            # No panel requested: use the first panel of the category.
            # NOTE: relies on Python 2's filter() returning a list.
            try:
                panel_id = filter(
                    lambda panel: panel[0] == cat_id, panels)[0][2]
            except IndexError:
                raise HTTPNotFound(_('Unknown administration panel'))
        provider = providers.get((cat_id, panel_id), None)
        if not provider:
            raise HTTPNotFound(_('Unknown administration panel'))
        if hasattr(provider, 'render_admin_panel'):
            template, data = provider.render_admin_panel(req, cat_id, panel_id,
                                                         path_info)
        else: # support for legacy WebAdmin panels
            # Legacy providers render a ClearSilver template themselves;
            # wrap the produced markup in the generic admin_legacy.html.
            data = {}
            cstmpl, ct = provider.process_admin_request(req, cat_id, panel_id,
                                                        path_info)
            output = cstmpl.render()
            title = _("Untitled")
            for panel in panels:
                if (panel[0], panel[2]) == (cat_id, panel_id):
                    title = panel[3]
            data.update({'page_title': title, 'page_body': HTML(output)})
            template = 'admin_legacy.html'
        # Common template data: the active panel ids, an href builder for
        # the panel and the full panel list for the navigation sidebar.
        data.update({
            'active_cat': cat_id, 'active_panel': panel_id,
            'panel_href': partial(req.href, 'admin', cat_id, panel_id),
            'panels': [{
                'category': {'id': panel[0], 'label': panel[1]},
                'panel': {'id': panel[2], 'label': panel[3]}
            } for panel in panels]
        })
        add_stylesheet(req, 'common/css/admin.css')
        return template, data, None
    # ITemplateProvider methods
    def get_htdocs_dirs(self):
        # This module ships no static resources of its own.
        return []
    def get_templates_dirs(self):
        return [pkg_resources.resource_filename('trac.admin', 'templates')]
    # Internal methods
    def _get_panels(self, req):
        """Return a list of available admin panels.

        :return: a `(panels, providers)` pair where `panels` is a list of
            `(category, category_label, page, page_label)` tuples and
            `providers` maps `(category, page)` ids to the providing
            component.
        """
        panels = []
        providers = {}
        for provider in self.panel_providers:
            add_provider = True
            # Hide the AccountManager panel in child environments (those
            # with a parent), which delegate account management upwards.
            if AccountManagerAdminPanel is not None and \
                    isinstance(provider, AccountManagerAdminPanel) and \
                    getattr(self.env, 'parent', None):
                add_provider = False
            if add_provider:
                p = list(provider.get_admin_panels(req) or [])
                for panel in p:
                    providers[(panel[0], panel[2])] = provider
                panels += p
        # Add panels contributed by legacy WebAdmin plugins
        if IAdminPageProvider:
            for provider in self.old_providers:
                p = list(provider.get_admin_pages(req))
                for page in p:
                    providers[(page[0], page[2])] = provider
                panels += p
        return panels, providers
def _save_config(config, req, log, notices=None):
    """Try to save the config, and display either a success notice or a
    failure warning.

    :param config: the configuration object to save
    :param req: the request, used to attach notices/warnings
    :param log: logger used to record write failures
    :param notices: optional list of success messages; defaults to a
                    single generic "changes saved" notice
    """
    try:
        config.save()
        if notices is None:
            notices = [_('Your changes have been saved.')]
        for notice in notices:
            add_notice(req, notice)
    except Exception, e: # best-effort: never crash the admin UI on a write error
        log.error('Error writing to trac.ini: %s', exception_to_unicode(e))
        add_warning(req, _('Error writing to trac.ini, make sure it is '
                           'writable by the web server. Your changes have '
                           'not been saved.'))
class BasicsAdminPanel(Component):
    """Admin panel for the basic project settings: name, URL, description
    and the default timezone, language and date format."""
    implements(IAdminPanelProvider)

    # IAdminPanelProvider methods

    def get_admin_panels(self, req):
        """Expose the panel to users with TRAC_ADMIN."""
        if 'TRAC_ADMIN' in req.perm:
            yield ('general', _('General'), 'basics', _('Basic Settings'))

    def render_admin_panel(self, req, cat, page, path_info):
        """Show the settings form and persist submitted values.

        Submitted timezone/language/date-format values are validated
        against the known sets; unknown values fall back to '' (i.e.
        "use the default")."""
        req.perm.require('TRAC_ADMIN')
        if Locale:
            locales = [Locale.parse(locale)
                       for locale in get_available_locales()]
            languages = sorted((str(locale), locale.display_name)
                               for locale in locales)
        else:
            locales, languages = [], []
        if req.method == 'POST':
            for option in ('name', 'url', 'descr'):
                self.config.set('project', option, req.args.get(option))
            default_timezone = req.args.get('default_timezone')
            if default_timezone not in all_timezones:
                default_timezone = ''
            self.config.set('trac', 'default_timezone', default_timezone)
            default_language = req.args.get('default_language')
            # Compare against the locale *identifiers*: `default_language`
            # is a string, and `str in [Locale, ...]` relies on Babel's
            # Locale.__eq__ accepting strings, which not all Babel versions
            # do — when it doesn't, the submitted language would always be
            # reset to ''.
            if default_language not in [str(locale) for locale in locales]:
                default_language = ''
            self.config.set('trac', 'default_language', default_language)
            default_date_format = req.args.get('default_date_format')
            if default_date_format != 'iso8601':
                default_date_format = ''
            self.config.set('trac', 'default_date_format', default_date_format)
            _save_config(self.config, req, self.log)
            req.redirect(req.href.admin(cat, page))
        default_timezone = self.config.get('trac', 'default_timezone')
        default_language = self.config.get('trac', 'default_language')
        default_date_format = self.config.get('trac', 'default_date_format')
        data = {
            'default_timezone': default_timezone,
            'timezones': all_timezones,
            'default_language': default_language.replace('-', '_'),
            'languages': languages,
            'default_date_format': default_date_format,
        }
        Chrome(self.env).add_textarea_grips(req)
        return 'admin_basics.html', data
class LoggingAdminPanel(Component):
    """Admin panel for configuring the environment's logging (destination,
    level and log file)."""
    implements(IAdminPanelProvider)

    # IAdminPanelProvider methods

    def get_admin_panels(self, req):
        # Hidden in child environments (those with a parent), which
        # inherit their logging configuration.
        if 'TRAC_ADMIN' in req.perm and not getattr(self.env, 'parent', None):
            yield ('general', _('General'), 'logging', _('Logging'))

    def render_admin_panel(self, req, cat, page, path_info):
        """Show the logging form and persist submitted settings.

        :raises PermissionError: in child environments, which must not
            override the inherited logging configuration.  (Requires
            `PermissionError` to be imported from `trac.perm`; the bare
            name used to be unresolved in this module.)
        :raises TracError: on an unknown log type or level, or a missing
            log file name.
        """
        if getattr(self.env, 'parent', None):
            raise PermissionError()
        log_type = self.env.log_type
        log_level = self.env.log_level
        log_file = self.env.log_file
        log_dir = os.path.join(self.env.path, 'log')
        # Choices offered in the form; platform-specific destinations are
        # disabled rather than hidden.  Legacy aliases ('unix', 'winlog',
        # 'nteventlog') still select the corresponding entry.
        log_types = [
            dict(name='none', label=_('None'),
                 selected=log_type == 'none', disabled=False),
            dict(name='stderr', label=_('Console'),
                 selected=log_type == 'stderr', disabled=False),
            dict(name='file', label=_('File'), selected=log_type == 'file',
                 disabled=False),
            dict(name='syslog', label=_('Syslog'), disabled=os.name != 'posix',
                 selected=log_type in ('unix', 'syslog')),
            dict(name='eventlog', label=_('Windows event log'),
                 disabled=os.name != 'nt',
                 selected=log_type in ('winlog', 'eventlog', 'nteventlog')),
        ]
        log_levels = ['CRITICAL', 'ERROR', 'WARNING', 'INFO', 'DEBUG']
        if req.method == 'POST':
            changed = False
            new_type = req.args.get('log_type')
            if new_type not in [t['name'] for t in log_types]:
                raise TracError(
                    _('Unknown log type %(type)s', type=new_type),
                    _('Invalid log type')
                )
            if new_type != log_type:
                self.config.set('logging', 'log_type', new_type)
                changed = True
                log_type = new_type
            if log_type == 'none':
                # No destination: the level setting is meaningless.
                self.config.remove('logging', 'log_level')
                changed = True
            else:
                new_level = req.args.get('log_level')
                if new_level not in log_levels:
                    raise TracError(
                        _('Unknown log level %(level)s', level=new_level),
                        _('Invalid log level'))
                if new_level != log_level:
                    self.config.set('logging', 'log_level', new_level)
                    changed = True
                    log_level = new_level
            if log_type == 'file':
                new_file = req.args.get('log_file', 'trac.log')
                if new_file != log_file:
                    self.config.set('logging', 'log_file', new_file)
                    changed = True
                    log_file = new_file
                if not log_file:
                    raise TracError(_('You must specify a log file'),
                                    _('Missing field'))
            else:
                self.config.remove('logging', 'log_file')
                changed = True
            if changed:
                # was `_save_config(...),` — the stray trailing comma built
                # a pointless 1-tuple around the (None) return value
                _save_config(self.config, req, self.log)
            req.redirect(req.href.admin(cat, page))
        data = {
            'type': log_type, 'types': log_types,
            'level': log_level, 'levels': log_levels,
            'file': log_file, 'dir': log_dir
        }
        return 'admin_logging.html', {'log': data}
class PermissionAdminPanel(Component):
implements(IAdminPanelProvider, IPermissionRequestor)
# IPermissionRequestor methods
def get_permission_actions(self):
actions = ['PERMISSION_GRANT', 'PERMISSION_REVOKE']
return actions + [('PERMISSION_ADMIN', actions)]
# IAdminPanelProvider methods
def get_admin_panels(self, req):
if 'PERMISSION_GRANT' in req.perm or 'PERMISSION_REVOKE' in req.perm:
yield ('general', _('General'), 'perm', _('Permissions'))
def render_admin_panel(self, req, cat, page, path_info):
perm = PermissionSystem(self.env)
all_permissions = perm.get_all_permissions()
all_actions = perm.get_actions()
if req.method == 'POST':
subject = req.args.get('subject', '').strip()
action = req.args.get('action')
group = req.args.get('group', '').strip()
if subject and subject.isupper() or \
group and group.isupper():
raise TracError(_('All upper-cased tokens are reserved for '
'permission names'))
# Grant permission to subject
if req.args.get('add') and subject and action:
req.perm.require('PERMISSION_GRANT')
if action not in all_actions:
raise TracError(_('Unknown action'))
req.perm.require(action)
if (subject, action) not in all_permissions:
perm.grant_permission(subject, action)
add_notice(req, _('The subject %(subject)s has been '
'granted the permission %(action)s.',
subject=subject, action=action))
req.redirect(req.href.admin(cat, page))
else:
add_warning(req, _('The permission %(action)s was already '
'granted to %(subject)s.',
action=action, subject=subject))
# Add subject to group
elif req.args.get('add') and subject and group:
req.perm.require('PERMISSION_GRANT')
for action in perm.get_user_permissions(group):
if not action in all_actions: # plugin disabled?
self.env.log.warn("Adding %s to group %s: " \
"Permission %s unavailable, skipping perm check." \
% (subject, group, action))
else:
req.perm.require(action)
if (subject, group) not in all_permissions:
perm.grant_permission(subject, group)
add_notice(req, _('The subject %(subject)s has been added '
'to the group %(group)s.',
subject=subject, group=group))
req.redirect(req.href.admin(cat, page))
else:
add_warning(req, _('The subject %(subject)s was already '
'added to the group %(group)s.',
subject=subject, group=group))
# Remove permissions action
elif req.args.get('remove') and req.args.get('sel'):
req.perm.require('PERMISSION_REVOKE')
sel = req.args.get('sel')
sel = sel if isinstance(sel, list) else [sel]
for key in sel:
subject, action = key.split(':', 1)
subject = unicode_from_base64(subject)
action = unicode_from_base64(action)
if (subject, action) in perm.get_all_permissions():
perm.revoke_permission(subject, action)
add_notice(req, _('The selected permissions have been '
'revoked.'))
req.redirect(req.href.admin(cat, page))
perms = [perm for perm in all_permissions if perm[1].isupper()]
groups = [perm for perm in all_permissions if not perm[1].isupper()]
return 'admin_perms.html', {
'actions': all_actions, 'perms': perms, 'groups': groups,
'unicode_to_base64': unicode_to_base64
}
class PluginAdminPanel(Component):
implements(IAdminPanelProvider)
# IAdminPanelProvider methods
def get_admin_panels(self, req):
if 'TRAC_ADMIN' in req.perm and not getattr(self.env, 'parent', None):
yield ('general', _('General'), 'plugin', _('Plugins'))
def render_admin_panel(self, req, cat, page, path_info):
if getattr(self.env, 'parent', None):
raise PermissionError()
req.perm.require('TRAC_ADMIN')
if req.method == 'POST':
if 'install' in req.args:
self._do_install(req)
elif 'uninstall' in req.args:
self._do_uninstall(req)
else:
self._do_update(req)
anchor = ''
if req.args.has_key('plugin'):
anchor = '#no%d' % (int(req.args.get('plugin')) + 1)
req.redirect(req.href.admin(cat, page) + anchor)
return self._render_view(req)
# Internal methods
def _do_install(self, req):
"""Install a plugin."""
if not req.args.has_key('plugin_file'):
raise TracError(_('No file uploaded'))
upload = req.args['plugin_file']
if isinstance(upload, unicode) or not upload.filename:
raise TracError(_('No file uploaded'))
plugin_filename = upload.filename.replace('\\', '/').replace(':', '/')
plugin_filename = os.path.basename(plugin_filename)
if not plugin_filename:
raise TracError(_('No file uploaded'))
if not plugin_filename.endswith('.egg') and \
not plugin_filename.endswith('.py'):
raise TracError(_('Uploaded file is not a Python source file or '
'egg'))
target_path = os.path.join(self.env.path, 'plugins', plugin_filename)
if os.path.isfile(target_path):
raise TracError(_('Plugin %(name)s already installed',
name=plugin_filename))
self.log.info('Installing plugin %s', plugin_filename)
flags = os.O_CREAT + os.O_WRONLY + os.O_EXCL
try:
flags += os.O_BINARY
except AttributeError:
# OS_BINARY not available on every platform
pass
with os.fdopen(os.open(target_path, flags, 0666), 'w') as target_file:
shutil.copyfileobj(upload.file, target_file)
self.log.info('Plugin %s installed to %s', plugin_filename,
target_path)
# TODO: Validate that the uploaded file is actually a valid Trac plugin
# Make the environment reset itself on the next request
self.env.config.touch()
def _do_uninstall(self, req):
"""Uninstall a plugin."""
plugin_filename = req.args.get('plugin_filename')
if not plugin_filename:
return
plugin_path = os.path.join(self.env.path, 'plugins', plugin_filename)
if not os.path.isfile(plugin_path):
return
self.log.info('Uninstalling plugin %s', plugin_filename)
os.remove(plugin_path)
# Make the environment reset itself on the next request
self.env.config.touch()
def _do_update(self, req):
"""Update component enablement."""
components = req.args.getlist('component')
enabled = req.args.getlist('enable')
added, removed = [], []
# FIXME: this needs to be more intelligent and minimize multiple
# component names to prefix rules
for component in components:
is_enabled = bool(self.env.is_component_enabled(component))
must_enable = component in enabled
if is_enabled != must_enable:
self.config.set('components', component,
'disabled' if is_enabled else 'enabled')
self.log.info('%sabling component %s',
'Dis' if is_enabled else 'En', component)
if must_enable:
added.append(component)
else:
removed.append(component)
if added or removed:
def make_list(items):
parts = [item.rsplit('.', 1) for item in items]
return tag.table(tag.tbody(
tag.tr(tag.td(c, class_='trac-name'),
tag.td('(%s.*)' % m, class_='trac-name'))
for m, c in parts), class_='trac-pluglist')
added.sort()
removed.sort()
notices = []
if removed:
msg = ngettext('The following component has been disabled:',
'The following components have been disabled:',
len(removed))
notices.append(tag(msg, make_list(removed)))
if added:
msg = ngettext('The following component has been enabled:',
'The following components have been enabled:',
len(added))
notices.append(tag(msg, make_list(added)))
_save_config(self.config, req, self.log, notices)
def _render_view(self, req):
plugins = get_plugin_info(self.env, include_core=True)
def safe_wiki_to_html(context, text):
try:
return format_to_html(self.env, context, text)
except Exception, e:
self.log.error('Unable to render component documentation: %s',
exception_to_unicode(e, traceback=True))
return tag.pre(text)
data = {
'plugins': plugins, 'show': req.args.get('show'),
'readonly': not os.access(get_plugins_dir(self.env),
os.F_OK + os.W_OK),
'safe_wiki_to_html': safe_wiki_to_html,
}
return 'admin_plugins.html', data
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/admin/console.py | trac/trac/admin/console.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2003-2010 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://trac.edgewall.org/wiki/TracLicense.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://trac.edgewall.org/log/.
from __future__ import with_statement
import cmd
import locale
import os.path
import pkg_resources
from shlex import shlex
import StringIO
import sys
import traceback
from trac import __version__ as VERSION
from trac.admin import AdminCommandError, AdminCommandManager
from trac.core import TracError
from trac.env import Environment
from trac.ticket.model import *
from trac.util import translation
from trac.util.html import html
from trac.util.text import console_print, exception_to_unicode, printout, \
printerr, raw_input, to_unicode, \
getpreferredencoding
from trac.util.translation import _, ngettext, get_negotiated_locale, \
has_babel, cleandoc_
from trac.versioncontrol.api import RepositoryManager
from trac.wiki.admin import WikiAdmin
from trac.wiki.macros import WikiMacroBase
TRAC_VERSION = pkg_resources.get_distribution('Trac').version
rl_completion_suppress_append = None
LANG = os.environ.get('LANG')
def find_readline_lib():
"""Return the name (and possibly the full path) of the readline library
linked to the readline module.
"""
import readline
with open(readline.__file__, "rb") as f:
data = f.read()
import re
m = re.search('\0([^\0]*libreadline[^\0]*)\0', data)
if m:
return m.group(1)
return None
class TracAdmin(cmd.Cmd):
intro = ''
doc_header = 'Trac Admin Console %(version)s\n' \
'Available Commands:\n' \
% {'version': TRAC_VERSION}
ruler = ''
prompt = "Trac> "
envname = None
__env = None
needs_upgrade = None
def __init__(self, envdir=None):
cmd.Cmd.__init__(self)
try:
import readline
delims = readline.get_completer_delims()
for c in '-/:()\\':
delims = delims.replace(c, '')
readline.set_completer_delims(delims)
# Work around trailing space automatically inserted by libreadline
# until Python gets fixed, see http://bugs.python.org/issue5833
import ctypes
lib_name = find_readline_lib()
if lib_name is not None:
lib = ctypes.cdll.LoadLibrary(lib_name)
global rl_completion_suppress_append
rl_completion_suppress_append = ctypes.c_int.in_dll(lib,
"rl_completion_suppress_append")
except Exception:
pass
self.interactive = False
if envdir:
self.env_set(os.path.abspath(envdir))
def emptyline(self):
pass
def onecmd(self, line):
"""`line` may be a `str` or an `unicode` object"""
try:
if isinstance(line, str):
if self.interactive:
encoding = sys.stdin.encoding
else:
encoding = getpreferredencoding() # sys.argv
line = to_unicode(line, encoding)
if self.interactive:
line = line.replace('\\', '\\\\')
rv = cmd.Cmd.onecmd(self, line) or 0
except SystemExit:
raise
except AdminCommandError, e:
printerr(_("Error: %(msg)s", msg=to_unicode(e)))
if e.show_usage:
print
self.do_help(e.cmd or self.arg_tokenize(line)[0])
rv = 2
except TracError, e:
printerr(exception_to_unicode(e))
rv = 2
except Exception, e:
printerr(exception_to_unicode(e))
rv = 2
if self.env_check():
self.env.log.error("Exception in trac-admin command: %s",
exception_to_unicode(e, traceback=True))
if not self.interactive:
return rv
def run(self):
self.interactive = True
printout(_("""Welcome to trac-admin %(version)s
Interactive Trac administration console.
Copyright (C) 2003-2013 Edgewall Software
Type: '?' or 'help' for help on commands.
""", version=TRAC_VERSION))
self.cmdloop()
##
## Environment methods
##
def env_set(self, envname, env=None):
self.envname = envname
self.prompt = "Trac [%s]> " % self.envname
if env is not None:
self.__env = env
def env_check(self):
if not self.__env:
try:
self._init_env()
except Exception:
return False
return True
@property
def env(self):
try:
if not self.__env:
self._init_env()
return self.__env
except Exception, e:
printerr(_("Failed to open environment: %(err)s",
err=exception_to_unicode(e, traceback=True)))
sys.exit(1)
def _init_env(self):
self.__env = env = Environment(self.envname)
# fixup language according to env settings
if has_babel:
default = env.config.get('trac', 'default_language', '')
negotiated = get_negotiated_locale([LANG, default])
if negotiated:
translation.activate(negotiated)
##
## Utility methods
##
def arg_tokenize(self, argstr):
"""`argstr` is an `unicode` string
... but shlex is not unicode friendly.
"""
lex = shlex(argstr.encode('utf-8'), posix=True)
lex.whitespace_split = True
lex.commenters = ''
if os.name == 'nt':
lex.escape = ''
return [unicode(token, 'utf-8') for token in lex] or ['']
def word_complete(self, text, words):
words = list(set(a for a in words if a.startswith(text)))
if len(words) == 1:
words[0] += ' ' # Only one choice, skip to next arg
return words
@staticmethod
def split_help_text(text):
import re
paragraphs = re.split(r'(?m)(?:^[ \t]*\n){1,}', text)
return [re.sub(r'(?m)\s+', ' ', each.strip())
for each in paragraphs]
@classmethod
def print_doc(cls, docs, stream=None, short=False, long=False):
if stream is None:
stream = sys.stdout
docs = [doc for doc in docs if doc[2]]
if not docs:
return
if short:
max_len = max(len(doc[0]) for doc in docs)
for (cmd, args, doc) in docs:
paragraphs = cls.split_help_text(doc)
console_print(stream, '%s %s' % (cmd.ljust(max_len),
paragraphs[0]))
else:
import textwrap
for (cmd, args, doc) in docs:
paragraphs = cls.split_help_text(doc)
console_print(stream, '%s %s\n' % (cmd, args))
console_print(stream, ' %s\n' % paragraphs[0])
if (long or len(docs) == 1) and len(paragraphs) > 1:
for paragraph in paragraphs[1:]:
console_print(stream, textwrap.fill(paragraph, 79,
initial_indent=' ', subsequent_indent=' ')
+ '\n')
##
## Command dispatcher
##
def complete_line(self, text, line, cmd_only=False):
if rl_completion_suppress_append is not None:
rl_completion_suppress_append.value = 1
args = self.arg_tokenize(line)
if line and line[-1] == ' ': # Space starts new argument
args.append('')
if self.env_check():
cmd_mgr = AdminCommandManager(self.env)
try:
comp = cmd_mgr.complete_command(args, cmd_only)
except Exception, e:
printerr()
printerr(_('Completion error: %(err)s',
err=exception_to_unicode(e)))
self.env.log.error("trac-admin completion error: %s",
exception_to_unicode(e, traceback=True))
comp = []
if len(args) == 1:
comp.extend(name[3:] for name in self.get_names()
if name.startswith('do_'))
try:
return comp.complete(text)
except AttributeError:
return self.word_complete(text, comp)
def completenames(self, text, line, begidx, endidx):
return self.complete_line(text, line, True)
def completedefault(self, text, line, begidx, endidx):
return self.complete_line(text, line)
def default(self, line):
try:
if not self.__env:
self._init_env()
if self.needs_upgrade is None:
self.needs_upgrade = self.__env.needs_upgrade()
except TracError, e:
raise AdminCommandError(to_unicode(e))
except Exception, e:
raise AdminCommandError(exception_to_unicode(e))
args = self.arg_tokenize(line)
if args[0] == 'upgrade':
self.needs_upgrade = None
elif self.needs_upgrade:
raise TracError(_('The Trac Environment needs to be upgraded.\n\n'
'Run "trac-admin %(path)s upgrade"',
path=self.envname))
cmd_mgr = AdminCommandManager(self.env)
return cmd_mgr.execute_command(*args)
##
## Available Commands
##
## Help
_help_help = [('help', '', 'Show documentation')]
@classmethod
def all_docs(cls, env=None):
docs = (cls._help_help + cls._help_initenv)
if env is not None:
docs.extend(AdminCommandManager(env).get_command_help())
return docs
def complete_help(self, text, line, begidx, endidx):
return self.complete_line(text, line[5:], True)
def do_help(self, line=None):
arg = self.arg_tokenize(line)
if arg[0]:
doc = getattr(self, "_help_" + arg[0], None)
if doc is None and self.env_check():
cmd_mgr = AdminCommandManager(self.env)
doc = cmd_mgr.get_command_help(arg)
if doc:
self.print_doc(doc)
else:
printerr(_("No documentation found for '%(cmd)s'."
" Use 'help' to see the list of commands.",
cmd=' '.join(arg)))
cmds = cmd_mgr.get_similar_commands(arg[0])
if cmds:
printout('')
printout(ngettext("Did you mean this?",
"Did you mean one of these?",
len(cmds)))
for cmd in cmds:
printout(' ' + cmd)
else:
printout(_("trac-admin - The Trac Administration Console "
"%(version)s", version=TRAC_VERSION))
if not self.interactive:
print
printout(_("Usage: trac-admin </path/to/projenv> "
"[command [subcommand] [option ...]]\n")
)
printout(_("Invoking trac-admin without command starts "
"interactive mode.\n"))
env = self.env if self.env_check() else None
self.print_doc(self.all_docs(env), short=True)
## Quit / EOF
_help_quit = [('quit', '', 'Exit the program')]
_help_exit = _help_quit
_help_EOF = _help_quit
def do_quit(self, line):
print
sys.exit()
do_exit = do_quit # Alias
do_EOF = do_quit # Alias
## Initenv
_help_initenv = [
('initenv', '[<projectname> <db> [<repostype> <repospath>]]',
"""Create and initialize a new environment
If no arguments are given, then the required parameters are requested
interactively.
One or more optional arguments --inherit=PATH can be used to specify
the "[inherit] file" option at environment creation time, so that only
the options not already specified in one of the global configuration
files are written to the conf/trac.ini file of the newly created
environment. Relative paths are resolved relative to the "conf"
directory of the new environment.
""")]
def do_initdb(self, line):
self.do_initenv(line)
def get_initenv_args(self):
returnvals = []
printout(_("Creating a new Trac environment at %(envname)s",
envname=self.envname))
printout(_("""
Trac will first ask a few questions about your environment
in order to initialize and prepare the project database.
Please enter the name of your project.
This name will be used in page titles and descriptions.
"""))
dp = 'My Project'
returnvals.append(raw_input(_("Project Name [%(default)s]> ",
default=dp)).strip() or dp)
printout(_("""
Please specify the connection string for the database to use.
By default, a local SQLite database is created in the environment
directory. It is also possible to use an already existing
PostgreSQL database (check the Trac documentation for the exact
connection string syntax).
"""))
ddb = 'sqlite:db/trac.db'
prompt = _("Database connection string [%(default)s]> ", default=ddb)
returnvals.append(raw_input(prompt).strip() or ddb)
print
return returnvals
def do_initenv(self, line):
def initenv_error(msg):
printerr(_("Initenv for '%(env)s' failed.", env=self.envname),
"\n" + msg)
if self.env_check():
initenv_error(_("Does an environment already exist?"))
return 2
if os.path.exists(self.envname) and os.listdir(self.envname):
initenv_error(_("Directory exists and is not empty."))
return 2
if not os.path.exists(os.path.dirname(self.envname)):
initenv_error(_("Base directory '%(env)s' does not exist. Please "
"create it manually and retry.",
env=os.path.dirname(self.envname)))
return 2
arg = self.arg_tokenize(line)
inherit_paths = []
add_wiki = True
i = 0
while i < len(arg):
item = arg[i]
if item.startswith('--inherit='):
inherit_paths.append(arg.pop(i)[10:])
elif item.startswith('--nowiki'):
add_wiki = False
arg.pop(i)
else:
i += 1
arg = arg or [''] # Reset to usual empty in case we popped the only one
project_name = None
db_str = None
repository_type = None
repository_dir = None
if len(arg) == 1 and not arg[0]:
project_name, db_str = self.get_initenv_args()
elif len(arg) == 2:
project_name, db_str = arg
elif len(arg) == 4:
project_name, db_str, repository_type, repository_dir = arg
else:
initenv_error('Wrong number of arguments: %d' % len(arg))
return 2
try:
printout(_("Creating and Initializing Project"))
options = [
('project', 'name', project_name),
('trac', 'database', db_str),
]
if repository_dir:
options.extend([
('trac', 'repository_type', repository_type),
('trac', 'repository_dir', repository_dir),
])
if inherit_paths:
options.append(('inherit', 'file',
",\n ".join(inherit_paths)))
try:
self.__env = Environment(self.envname, create=True,
options=options)
except Exception, e:
initenv_error(_('Failed to create environment.'))
printerr(e)
traceback.print_exc()
sys.exit(1)
if add_wiki:
# Add a few default wiki pages
printout(_(" Installing default wiki pages"))
pages_dir = pkg_resources.resource_filename('trac.wiki',
'default-pages')
WikiAdmin(self.__env).load_pages(pages_dir)
if repository_dir:
try:
repos = RepositoryManager(self.__env).get_repository('')
if repos:
printout(_(" Indexing default repository"))
repos.sync(self._resync_feedback)
except TracError, e:
printerr(_("""
---------------------------------------------------------------------
Warning: couldn't index the default repository.
This can happen for a variety of reasons: wrong repository type,
no appropriate third party library for this repository type,
no actual repository at the specified repository path...
You can nevertheless start using your Trac environment, but
you'll need to check again your trac.ini file and the [trac]
repository_type and repository_path settings.
"""))
except Exception, e:
initenv_error(to_unicode(e))
traceback.print_exc()
return 2
printout(_("""
---------------------------------------------------------------------
Project environment for '%(project_name)s' created.
You may now configure the environment by editing the file:
%(config_path)s
If you'd like to take this new project environment for a test drive,
try running the Trac standalone web server `tracd`:
tracd --port 8000 %(project_path)s
Then point your browser to http://localhost:8000/%(project_dir)s.
There you can also browse the documentation for your installed
version of Trac, including information on further setup (such as
deploying Trac to a real web server).
The latest documentation can also always be found on the project
website:
http://trac.edgewall.org/
Congratulations!
""", project_name=project_name, project_path=self.envname,
project_dir=os.path.basename(self.envname),
config_path=os.path.join(self.envname, 'conf', 'trac.ini')))
def _resync_feedback(self, rev):
sys.stdout.write(' [%s]\r' % rev)
sys.stdout.flush()
class TracAdminHelpMacro(WikiMacroBase):
_domain = 'messages'
_description = cleandoc_(
"""Display help for trac-admin commands.
Examples:
{{{
[[TracAdminHelp]] # all commands
[[TracAdminHelp(wiki)]] # all wiki commands
[[TracAdminHelp(wiki export)]] # the "wiki export" command
[[TracAdminHelp(upgrade)]] # the upgrade command
}}}
""")
def expand_macro(self, formatter, name, content):
if content:
arg = content.strip().split()
doc = getattr(TracAdmin, "_help_" + arg[0], None)
if doc is None:
cmd_mgr = AdminCommandManager(self.env)
doc = cmd_mgr.get_command_help(arg)
if not doc:
raise TracError('Unknown trac-admin command "%s"' % content)
else:
doc = TracAdmin.all_docs(self.env)
buf = StringIO.StringIO()
TracAdmin.print_doc(doc, buf, long=True)
return html.PRE(buf.getvalue(), class_='wiki')
def run(args=None):
"""Main entry point."""
if args is None:
args = sys.argv[1:]
locale = None
if has_babel:
import babel
try:
locale = get_negotiated_locale([LANG]) or babel.Locale.default()
except babel.UnknownLocaleError:
pass
translation.activate(locale)
admin = TracAdmin()
if len(args) > 0:
if args[0] in ('-h', '--help', 'help'):
return admin.onecmd(' '.join(['help'] + args[1:]))
elif args[0] in ('-v','--version'):
printout(os.path.basename(sys.argv[0]), TRAC_VERSION)
else:
env_path = os.path.abspath(args[0])
try:
unicode(env_path, 'ascii')
except UnicodeDecodeError:
printerr(_("Non-ascii environment path '%(path)s' not "
"supported.", path=to_unicode(env_path)))
sys.exit(2)
admin.env_set(env_path)
if len(args) > 1:
s_args = ' '.join(["'%s'" % c for c in args[2:]])
command = args[1] + ' ' + s_args
return admin.onecmd(command)
else:
while True:
try:
admin.run()
except KeyboardInterrupt:
admin.do_quit('')
else:
return admin.onecmd("help")
if __name__ == '__main__':
pkg_resources.require('Trac==%s' % VERSION)
sys.exit(run())
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/admin/__init__.py | trac/trac/admin/__init__.py | # -*- coding: utf-8 -*-
#
# Copyright (C)2006-2009 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://trac.edgewall.org/wiki/TracLicense.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://trac.edgewall.org/log/.
from trac.admin.api import *
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/admin/tests/functional.py | trac/trac/admin/tests/functional.py | #!/usr/bin/python
from trac.tests.functional import *
from trac.util.text import unicode_to_base64, unicode_from_base64
class TestBasicSettings(FunctionalTwillTestCaseSetup):
def runTest(self):
"""Check basic settings."""
self._tester.go_to_admin()
tc.formvalue('modbasic', 'url', 'https://my.example.com/something')
tc.submit()
tc.find('https://my.example.com/something')
class TestLoggingNone(FunctionalTwillTestCaseSetup):
def runTest(self):
"""Turn off logging."""
# For now, we just check that it shows up.
self._tester.go_to_admin()
tc.follow('Logging')
tc.find('trac.log')
tc.formvalue('modlog', 'log_type', 'none')
tc.submit()
tc.find('selected="selected">None</option')
class TestLoggingToFile(FunctionalTwillTestCaseSetup):
def runTest(self):
"""Turn logging back on."""
# For now, we just check that it shows up.
self._tester.go_to_admin()
tc.follow('Logging')
tc.find('trac.log')
tc.formvalue('modlog', 'log_type', 'file')
tc.formvalue('modlog', 'log_file', 'trac.log2')
tc.formvalue('modlog', 'log_level', 'INFO')
tc.submit()
tc.find('selected="selected">File</option')
tc.find('id="log_file".*value="trac.log2"')
tc.find('selected="selected">INFO</option>')
class TestLoggingToFileNormal(FunctionalTwillTestCaseSetup):
def runTest(self):
"""Setting logging back to normal."""
# For now, we just check that it shows up.
self._tester.go_to_admin()
tc.follow('Logging')
tc.find('trac.log')
tc.formvalue('modlog', 'log_file', 'trac.log')
tc.formvalue('modlog', 'log_level', 'DEBUG')
tc.submit()
tc.find('selected="selected">File</option')
tc.find('id="log_file".*value="trac.log"')
tc.find('selected="selected">DEBUG</option>')
class TestCreatePermissionGroup(FunctionalTwillTestCaseSetup):
def runTest(self):
"""Create a permissions group"""
self._tester.go_to_admin()
tc.follow('Permissions')
tc.find('Manage Permissions')
tc.formvalue('addperm', 'gp_subject', 'somegroup')
tc.formvalue('addperm', 'action', 'REPORT_CREATE')
tc.submit()
somegroup = unicode_to_base64('somegroup')
REPORT_CREATE = unicode_to_base64('REPORT_CREATE')
tc.find('%s:%s' % (somegroup, REPORT_CREATE))
class TestAddUserToGroup(FunctionalTwillTestCaseSetup):
def runTest(self):
"""Add a user to a permissions group"""
self._tester.go_to_admin()
tc.follow('Permissions')
tc.find('Manage Permissions')
tc.formvalue('addsubj', 'sg_subject', 'authenticated')
tc.formvalue('addsubj', 'sg_group', 'somegroup')
tc.submit()
authenticated = unicode_to_base64('authenticated')
somegroup = unicode_to_base64('somegroup')
tc.find('%s:%s' % (authenticated, somegroup))
class TestRemoveUserFromGroup(FunctionalTwillTestCaseSetup):
def runTest(self):
"""Remove a user from a permissions group"""
self._tester.go_to_admin()
tc.follow('Permissions')
tc.find('Manage Permissions')
authenticated = unicode_to_base64('authenticated')
somegroup = unicode_to_base64('somegroup')
tc.find('%s:%s' % (authenticated, somegroup))
tc.formvalue('revokeform', 'sel', '%s:%s' % (authenticated, somegroup))
tc.submit()
tc.notfind('%s:%s' % (authenticated, somegroup))
class TestRemovePermissionGroup(FunctionalTwillTestCaseSetup):
def runTest(self):
"""Remove a permissions group"""
self._tester.go_to_admin()
tc.follow('Permissions')
tc.find('Manage Permissions')
somegroup = unicode_to_base64('somegroup')
REPORT_CREATE = unicode_to_base64('REPORT_CREATE')
tc.find('%s:%s' % (somegroup, REPORT_CREATE))
tc.formvalue('revokeform', 'sel', '%s:%s' % (somegroup, REPORT_CREATE))
tc.submit()
tc.notfind('%s:%s' % (somegroup, REPORT_CREATE))
tc.notfind(somegroup)
class TestPluginSettings(FunctionalTwillTestCaseSetup):
def runTest(self):
"""Check plugin settings."""
self._tester.go_to_admin()
tc.follow('Plugins')
tc.find('Manage Plugins')
tc.find('Install Plugin')
def functionalSuite(suite=None):
if not suite:
import trac.tests.functional.testcases
suite = trac.tests.functional.testcases.functionalSuite()
suite.addTest(TestBasicSettings())
suite.addTest(TestLoggingNone())
suite.addTest(TestLoggingToFile())
suite.addTest(TestLoggingToFileNormal())
suite.addTest(TestCreatePermissionGroup())
suite.addTest(TestAddUserToGroup())
suite.addTest(TestRemoveUserFromGroup())
suite.addTest(TestRemovePermissionGroup())
suite.addTest(TestPluginSettings())
return suite
if __name__ == '__main__':
unittest.main(defaultTest='functionalSuite')
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/admin/tests/console.py | trac/trac/admin/tests/console.py | # -*- coding: utf-8 -*-
#
# Copyright (C) 2004-2009 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://trac.edgewall.org/wiki/TracLicense.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://trac.edgewall.org/log/.
#
# Author: Tim Moloney <t.moloney@verizon.net>
import difflib
import os
import re
import sys
import unittest
from StringIO import StringIO
# IAdminCommandProvider implementations
import trac.admin.api
import trac.attachment
import trac.perm
import trac.ticket.admin
import trac.versioncontrol.admin
import trac.versioncontrol.api
import trac.versioncontrol.web_ui
import trac.wiki.admin
# IPermissionRequestor implementations (for 'permission' related tests)
import trac.about
import trac.admin.web_ui
import trac.config
import trac.ticket.api
import trac.ticket.report
import trac.ticket.roadmap
import trac.ticket.web_ui
import trac.search.web_ui
import trac.timeline.web_ui
import trac.wiki.web_ui
from trac.admin import console, console_date_format
from trac.test import EnvironmentStub
from trac.util.datefmt import format_date, get_date_format_hint
from trac.web.tests.session import _prep_session_table
STRIP_TRAILING_SPACE = re.compile(r'( +)$', re.MULTILINE)
def load_expected_results(file, pattern):
"""Reads the file, named file, which contains test results separated by the
regular expression pattern.
The test results are returned as a dictionary.
"""
expected = {}
compiled_pattern = re.compile(pattern)
f = open(file, 'r')
for line in f:
line = line.rstrip().decode('utf-8')
match = compiled_pattern.search(line)
if match:
test = match.groups()[0]
expected[test] = ''
else:
expected[test] += line + '\n'
f.close()
return expected
class TracadminTestCase(unittest.TestCase):
"""
Tests the output of trac-admin and is meant to be used with
.../trac/tests.py.
"""
expected_results = load_expected_results(
os.path.join(os.path.split(__file__)[0], 'console-tests.txt'),
'===== (test_[^ ]+) =====')
def setUp(self):
self.env = EnvironmentStub(default_data=True, enable=('trac.*',),
disable=('trac.tests.*',))
self._admin = console.TracAdmin()
self._admin.env_set('', self.env)
# Set test date to 11th Jan 2004
self._test_date = '2004-01-11'
def tearDown(self):
self.env = None
    def _execute(self, cmd, strip_trailing_space=True, input=None):
        """Run a single trac-admin command and capture its console output.

        :param cmd: the command line passed to `TracAdmin.onecmd`
        :param strip_trailing_space: when `True`, trailing spaces are
            stripped from every line of the captured output
        :param input: optional text piped to the command on stdin
        :return: a `(return_value, output)` tuple where `output` is a
            unicode string of everything written to stdout and stderr
        """
        # Save the real streams so they can be restored in the `finally`
        # block no matter what the command does.
        _in = sys.stdin
        _err = sys.stderr
        _out = sys.stdout
        try:
            if input:
                sys.stdin = StringIO(input.encode('utf-8'))
                sys.stdin.encoding = 'utf-8' # fake input encoding
            # Capture stdout and stderr in a single shared buffer.
            sys.stderr = sys.stdout = out = StringIO()
            out.encoding = 'utf-8' # fake output encoding
            retval = None
            try:
                retval = self._admin.onecmd(cmd)
            except SystemExit:
                # trac-admin may exit via sys.exit() on errors; swallow it so
                # the test can inspect the captured output instead.
                pass
            value = out.getvalue()
            if isinstance(value, str): # reverse what print_listing did
                value = value.decode('utf-8')
            # DEBUG: uncomment in case of `AssertionError: 0 != 2` in tests
            #if retval != 0:
            #    print>>_err, value
            if strip_trailing_space:
                return retval, STRIP_TRAILING_SPACE.sub('', value)
            else:
                return retval, value
        finally:
            # Always restore the original streams.
            sys.stdin = _in
            sys.stderr = _err
            sys.stdout = _out
def assertEqual(self, expected_results, output):
if not (isinstance(expected_results, basestring) and \
isinstance(output, basestring)):
return unittest.TestCase.assertEqual(self, expected_results, output)
def diff():
# Create a useful delta between the output and the expected output
output_lines = ['%s\n' % x for x in output.split('\n')]
expected_lines = ['%s\n' % x for x in expected_results.split('\n')]
return ''.join(difflib.unified_diff(expected_lines, output_lines,
'expected', 'actual'))
if '[...]' in expected_results:
m = re.match(expected_results.replace('[...]', '.*'), output,
re.MULTILINE)
unittest.TestCase.assertTrue(self, m,
"%r != %r\n%s" % (expected_results,
output, diff()))
else:
unittest.TestCase.assertEqual(self, expected_results, output,
"%r != %r\n%s" % (expected_results,
output, diff()))
# Help test
def test_help_ok(self):
"""
Tests the 'help' command in trac-admin. Since the 'help' command
has no command arguments, it is hard to call it incorrectly. As
a result, there is only this one test.
"""
from trac import __version__
test_name = sys._getframe().f_code.co_name
d = {'version': __version__,
'date_format_hint': get_date_format_hint()}
expected_results = self.expected_results[test_name] % d
rv, output = self._execute('help')
self.assertEqual(0, rv)
self.assertEqual(expected_results, output)
# Attachment tests
def test_attachment_list_empty(self):
"""
Tests the 'attachment list' command in trac-admin, on a wiki page that
doesn't have any attachments.
"""
# FIXME: Additional tests should be written for the other 'attachment'
# commands. This requires being able to control the current
# time, which in turn would require centralizing the time
# provider, for example in the environment object.
test_name = sys._getframe().f_code.co_name
rv, output = self._execute('attachment list wiki:WikiStart')
self.assertEqual(0, rv)
self.assertEqual(self.expected_results[test_name], output)
# Config tests
def test_config_get(self):
"""
Tests the 'config get' command in trac-admin. This particular
test gets the project name from the config.
"""
test_name = sys._getframe().f_code.co_name
self.env.config.set('project', 'name', 'Test project')
rv, output = self._execute('config get project name')
self.assertEqual(0, rv)
self.assertEqual(self.expected_results[test_name], output)
def test_config_set(self):
"""
Tests the 'config set' command in trac-admin. This particular
test sets the project name using an option value containing a space.
"""
test_name = sys._getframe().f_code.co_name
rv, output = self._execute('config set project name "Test project"')
self.assertEqual(0, rv)
self.assertEqual(self.expected_results[test_name], output)
self.assertEqual('Test project',
self.env.config.get('project', 'name'))
def test_config_remove(self):
"""
Tests the 'config remove' command in trac-admin. This particular
test removes the project name from the config, therefore reverting
the option to the default value.
"""
test_name = sys._getframe().f_code.co_name
self.env.config.set('project', 'name', 'Test project')
rv, output = self._execute('config remove project name')
self.assertEqual(0, rv)
self.assertEqual(self.expected_results[test_name], output)
self.assertEqual('My Project', self.env.config.get('project', 'name'))
# Permission tests
def test_permission_list_ok(self):
"""
Tests the 'permission list' command in trac-admin. Since this command
has no command arguments, it is hard to call it incorrectly. As
a result, there is only this one test.
"""
test_name = sys._getframe().f_code.co_name
rv, output = self._execute('permission list')
self.assertEqual(0, rv)
self.assertEqual(self.expected_results[test_name], output)
def test_permission_add_one_action_ok(self):
"""
Tests the 'permission add' command in trac-admin. This particular
test passes valid arguments to add one permission and checks for
success.
"""
test_name = sys._getframe().f_code.co_name
self._execute('permission add test_user WIKI_VIEW')
rv, output = self._execute('permission list')
self.assertEqual(0, rv)
self.assertEqual(self.expected_results[test_name], output)
def test_permission_add_multiple_actions_ok(self):
"""
Tests the 'permission add' command in trac-admin. This particular
test passes valid arguments to add multiple permissions and checks for
success.
"""
test_name = sys._getframe().f_code.co_name
self._execute('permission add test_user LOG_VIEW FILE_VIEW')
rv, output = self._execute('permission list')
self.assertEqual(0, rv)
self.assertEqual(self.expected_results[test_name], output)
def test_permission_add_already_exists(self):
"""
Tests the 'permission add' command in trac-admin. This particular
test passes a permission that already exists and checks for the
message. Other permissions passed are added.
"""
test_name = sys._getframe().f_code.co_name
rv, output = self._execute('permission add anonymous WIKI_CREATE '
'WIKI_VIEW WIKI_MODIFY')
self.assertEqual(0, rv)
rv, output2 = self._execute('permission list')
self.assertEqual(0, rv)
self.assertEqual(self.expected_results[test_name], output + output2)
def test_permission_remove_one_action_ok(self):
"""
Tests the 'permission remove' command in trac-admin. This particular
test passes valid arguments to remove one permission and checks for
success.
"""
test_name = sys._getframe().f_code.co_name
self._execute('permission remove anonymous TICKET_MODIFY')
rv, output = self._execute('permission list')
self.assertEqual(0, rv)
self.assertEqual(self.expected_results[test_name], output)
def test_permission_remove_multiple_actions_ok(self):
"""
Tests the 'permission remove' command in trac-admin. This particular
test passes valid arguments to remove multiple permission and checks
for success.
"""
test_name = sys._getframe().f_code.co_name
self._execute('permission remove anonymous WIKI_CREATE WIKI_MODIFY')
rv, output = self._execute('permission list')
self.assertEqual(0, rv)
self.assertEqual(self.expected_results[test_name], output)
def test_permission_remove_all_actions_for_user(self):
"""
Tests the 'permission remove' command in trac-admin. This particular
test removes all permissions for anonymous.
"""
test_name = sys._getframe().f_code.co_name
self._execute('permission remove anonymous *')
rv, output = self._execute('permission list')
self.assertEqual(0, rv)
self.assertEqual(self.expected_results[test_name], output)
def test_permission_remove_action_for_all_users(self):
"""
Tests the 'permission remove' command in trac-admin. This particular
test removes the TICKET_CREATE permission from all users.
"""
test_name = sys._getframe().f_code.co_name
self._execute('permission add anonymous TICKET_CREATE')
self._execute('permission remove * TICKET_CREATE')
rv, output = self._execute('permission list')
self.assertEqual(0, rv)
self.assertEqual(self.expected_results[test_name], output)
def test_permission_remove_unknown_user(self):
"""
Tests the 'permission remove' command in trac-admin. This particular
test tries removing a permission from an unknown user.
"""
test_name = sys._getframe().f_code.co_name
rv, output = self._execute('permission remove joe TICKET_VIEW')
self.assertEqual(2, rv)
self.assertEqual(self.expected_results[test_name], output)
def test_permission_remove_action_not_granted(self):
"""
Tests the 'permission remove' command in trac-admin. This particular
test tries removing TICKET_CREATE from user anonymous, who doesn't
have that permission.
"""
test_name = sys._getframe().f_code.co_name
rv, output = self._execute('permission remove anonymous TICKET_CREATE')
self.assertEqual(2, rv)
self.assertEqual(self.expected_results[test_name], output)
def test_permission_export_ok(self):
"""
Tests the 'permission export' command in trac-admin. This particular
test exports the default permissions to stdout.
"""
test_name = sys._getframe().f_code.co_name
rv, output = self._execute('permission export')
self.assertEqual(0, rv)
self.assertEqual(self.expected_results[test_name], output)
def test_permission_import_ok(self):
"""
Tests the 'permission import' command in trac-admin. This particular
test exports additional permissions, removes them and imports them back.
"""
test_name = sys._getframe().f_code.co_name
user = u'test_user\u0250'
self._execute('permission add ' + user + ' WIKI_VIEW')
self._execute('permission add ' + user + ' TICKET_VIEW')
rv, output = self._execute('permission export')
self._execute('permission remove ' + user + ' *')
rv, output = self._execute('permission import', input=output)
self.assertEqual(0, rv)
self.assertEqual('', output)
rv, output = self._execute('permission list')
self.assertEqual(0, rv)
self.assertEqual(self.expected_results[test_name], output)
# Component tests
def test_component_list_ok(self):
"""
Tests the 'component list' command in trac-admin. Since this command
has no command arguments, it is hard to call it incorrectly. As
a result, there is only this one test.
"""
test_name = sys._getframe().f_code.co_name
rv, output = self._execute('component list')
self.assertEqual(0, rv)
self.assertEqual(self.expected_results[test_name], output)
def test_component_add_ok(self):
"""
Tests the 'component add' command in trac-admin. This particular
test passes valid arguments and checks for success.
"""
test_name = sys._getframe().f_code.co_name
self._execute('component add new_component new_user')
rv, output = self._execute('component list')
self.assertEqual(0, rv)
self.assertEqual(self.expected_results[test_name], output)
def test_component_add_error_already_exists(self):
"""
Tests the 'component add' command in trac-admin. This particular
test passes a component name that already exists and checks for an
error message.
"""
test_name = sys._getframe().f_code.co_name
rv, output = self._execute('component add component1 new_user')
self.assertEqual(2, rv)
self.assertEqual(self.expected_results[test_name], output)
def test_component_rename_ok(self):
"""
Tests the 'component rename' command in trac-admin. This particular
test passes valid arguments and checks for success.
"""
test_name = sys._getframe().f_code.co_name
self._execute('component rename component1 changed_name')
rv, output = self._execute('component list')
self.assertEqual(0, rv)
self.assertEqual(self.expected_results[test_name], output)
def test_component_rename_error_bad_component(self):
"""
Tests the 'component rename' command in trac-admin. This particular
test tries to rename a component that does not exist.
"""
test_name = sys._getframe().f_code.co_name
rv, output = self._execute('component rename bad_component changed_name')
self.assertEqual(2, rv)
self.assertEqual(self.expected_results[test_name], output)
def test_component_rename_error_bad_new_name(self):
"""
Tests the 'component rename' command in trac-admin. This particular
test tries to rename a component to a name that already exists.
"""
test_name = sys._getframe().f_code.co_name
rv, output = self._execute('component rename component1 component2')
self.assertEqual(2, rv)
self.assertEqual(self.expected_results[test_name], output)
def test_component_chown_ok(self):
"""
Tests the 'component chown' command in trac-admin. This particular
test passes valid arguments and checks for success.
"""
test_name = sys._getframe().f_code.co_name
self._execute('component chown component2 changed_owner')
rv, output = self._execute('component list')
self.assertEqual(0, rv)
self.assertEqual(self.expected_results[test_name], output)
def test_component_chown_error_bad_component(self):
"""
Tests the 'component chown' command in trac-admin. This particular
test tries to change the owner of a component that does not
exist.
"""
test_name = sys._getframe().f_code.co_name
rv, output = self._execute('component chown bad_component changed_owner')
self.assertEqual(2, rv)
# We currently trigger a deprecation warning with py26 so we
# can currrently only verify that the end of the output string is
# correct
self.assertEqual(output.endswith(self.expected_results[test_name]), True)
def test_component_remove_ok(self):
"""
Tests the 'component remove' command in trac-admin. This particular
test passes a valid argument and checks for success.
"""
test_name = sys._getframe().f_code.co_name
self._execute('component remove component1')
rv, output = self._execute('component list')
self.assertEqual(0, rv)
self.assertEqual(self.expected_results[test_name], output)
def test_component_remove_error_bad_component(self):
"""
Tests the 'component remove' command in trac-admin. This particular
test tries to remove a component that does not exist.
"""
test_name = sys._getframe().f_code.co_name
rv, output = self._execute('component remove bad_component')
self.assertEqual(2, rv)
self.assertEqual(self.expected_results[test_name], output)
# Ticket-type tests
def test_ticket_type_list_ok(self):
"""
Tests the 'ticket_type list' command in trac-admin. Since this command
has no command arguments, it is hard to call it incorrectly. As
a result, there is only this one test.
"""
test_name = sys._getframe().f_code.co_name
rv, output = self._execute('ticket_type list')
self.assertEqual(0, rv)
self.assertEqual(self.expected_results[test_name], output)
def test_ticket_type_add_ok(self):
"""
Tests the 'ticket_type add' command in trac-admin. This particular
test passes a valid argument and checks for success.
"""
test_name = sys._getframe().f_code.co_name
self._execute('ticket_type add new_type')
rv, output = self._execute('ticket_type list')
self.assertEqual(0, rv)
self.assertEqual(self.expected_results[test_name], output)
def test_ticket_type_add_error_already_exists(self):
"""
Tests the 'ticket_type add' command in trac-admin. This particular
test passes a ticket type that already exists and checks for an error
message.
"""
test_name = sys._getframe().f_code.co_name
rv, output = self._execute('ticket_type add defect')
self.assertEqual(2, rv)
self.assertEqual(self.expected_results[test_name], output)
def test_ticket_type_change_ok(self):
"""
Tests the 'ticket_type change' command in trac-admin. This particular
test passes valid arguments and checks for success.
"""
test_name = sys._getframe().f_code.co_name
self._execute('ticket_type change defect bug')
rv, output = self._execute('ticket_type list')
self.assertEqual(0, rv)
self.assertEqual(self.expected_results[test_name], output)
def test_ticket_type_change_error_bad_type(self):
"""
Tests the 'ticket_type change' command in trac-admin. This particular
test tries to change a priority that does not exist.
"""
test_name = sys._getframe().f_code.co_name
rv, output = self._execute('ticket_type change bad_type changed_type')
self.assertEqual(2, rv)
self.assertEqual(self.expected_results[test_name], output)
def test_ticket_type_change_error_bad_new_name(self):
"""
Tests the 'ticket_type change' command in trac-admin. This particular
test tries to change a ticket type to another type that already exists.
"""
test_name = sys._getframe().f_code.co_name
rv, output = self._execute('ticket_type change defect task')
self.assertEqual(2, rv)
self.assertEqual(self.expected_results[test_name], output)
def test_ticket_type_remove_ok(self):
"""
Tests the 'ticket_type remove' command in trac-admin. This particular
test passes a valid argument and checks for success.
"""
test_name = sys._getframe().f_code.co_name
self._execute('ticket_type remove task')
rv, output = self._execute('ticket_type list')
self.assertEqual(0, rv)
self.assertEqual(self.expected_results[test_name], output)
def test_ticket_type_remove_error_bad_type(self):
"""
Tests the 'ticket_type remove' command in trac-admin. This particular
test tries to remove a ticket type that does not exist.
"""
test_name = sys._getframe().f_code.co_name
rv, output = self._execute('ticket_type remove bad_type')
self.assertEqual(2, rv)
self.assertEqual(self.expected_results[test_name], output)
def test_ticket_type_order_down_ok(self):
"""
Tests the 'ticket_type order' command in trac-admin. This particular
test passes a valid argument and checks for success.
"""
test_name = sys._getframe().f_code.co_name
self._execute('ticket_type order defect down')
rv, output = self._execute('ticket_type list')
self.assertEqual(0, rv)
self.assertEqual(self.expected_results[test_name], output)
def test_ticket_type_order_up_ok(self):
"""
Tests the 'ticket_type order' command in trac-admin. This particular
test passes a valid argument and checks for success.
"""
test_name = sys._getframe().f_code.co_name
self._execute('ticket_type order enhancement up')
rv, output = self._execute('ticket_type list')
self.assertEqual(0, rv)
self.assertEqual(self.expected_results[test_name], output)
    def test_ticket_type_order_error_bad_type(self):
        """
        Tests the 'ticket_type order' command in trac-admin. This particular
        test tries to reorder a ticket type that does not exist.
        """
        test_name = sys._getframe().f_code.co_name
        rv, output = self._execute('ticket_type order bad_type up')
        self.assertEqual(2, rv)
        self.assertEqual(self.expected_results[test_name], output)
# Priority tests
def test_priority_list_ok(self):
"""
Tests the 'priority list' command in trac-admin. Since this command
has no command arguments, it is hard to call it incorrectly. As
a result, there is only this one test.
"""
test_name = sys._getframe().f_code.co_name
rv, output = self._execute('priority list')
self.assertEqual(0, rv)
self.assertEqual(self.expected_results[test_name], output)
def test_priority_add_ok(self):
"""
Tests the 'priority add' command in trac-admin. This particular
test passes a valid argument and checks for success.
"""
test_name = sys._getframe().f_code.co_name
self._execute('priority add new_priority')
rv, output = self._execute('priority list')
self.assertEqual(0, rv)
self.assertEqual(self.expected_results[test_name], output)
def test_priority_add_many_ok(self):
"""
Tests adding more than 10 priority values. This makes sure that
ordering is preserved when adding more than 10 values.
"""
test_name = sys._getframe().f_code.co_name
for i in xrange(11):
self._execute('priority add p%s' % i)
rv, output = self._execute('priority list')
self.assertEqual(0, rv)
self.assertEqual(self.expected_results[test_name], output)
def test_priority_add_error_already_exists(self):
"""
Tests the 'priority add' command in trac-admin. This particular
test passes a priority name that already exists and checks for an
error message.
"""
test_name = sys._getframe().f_code.co_name
rv, output = self._execute('priority add blocker')
self.assertEqual(2, rv)
self.assertEqual(self.expected_results[test_name], output)
def test_priority_change_ok(self):
"""
Tests the 'priority change' command in trac-admin. This particular
test passes valid arguments and checks for success.
"""
test_name = sys._getframe().f_code.co_name
self._execute('priority change major normal')
rv, output = self._execute('priority list')
self.assertEqual(0, rv)
self.assertEqual(self.expected_results[test_name], output)
def test_priority_change_error_bad_priority(self):
"""
Tests the 'priority change' command in trac-admin. This particular
test tries to change a priority that does not exist.
"""
test_name = sys._getframe().f_code.co_name
rv, output = self._execute('priority change bad_priority changed_name')
self.assertEqual(2, rv)
self.assertEqual(self.expected_results[test_name], output)
def test_priority_change_error_bad_new_name(self):
"""
Tests the 'priority change' command in trac-admin. This particular
test tries to change a priority to a name that already exists.
"""
test_name = sys._getframe().f_code.co_name
rv, output = self._execute('priority change major minor')
self.assertEqual(2, rv)
self.assertEqual(self.expected_results[test_name], output)
def test_priority_remove_ok(self):
"""
Tests the 'priority remove' command in trac-admin. This particular
test passes a valid argument and checks for success.
"""
test_name = sys._getframe().f_code.co_name
self._execute('priority remove major')
rv, output = self._execute('priority list')
self.assertEqual(0, rv)
self.assertEqual(self.expected_results[test_name], output)
def test_priority_remove_error_bad_priority(self):
"""
Tests the 'priority remove' command in trac-admin. This particular
test tries to remove a priority that does not exist.
"""
test_name = sys._getframe().f_code.co_name
rv, output = self._execute('priority remove bad_priority')
self.assertEqual(2, rv)
self.assertEqual(self.expected_results[test_name], output)
def test_priority_order_down_ok(self):
"""
Tests the 'priority order' command in trac-admin. This particular
test passes a valid argument and checks for success.
"""
test_name = sys._getframe().f_code.co_name
self._execute('priority order blocker down')
rv, output = self._execute('priority list')
self.assertEqual(0, rv)
self.assertEqual(self.expected_results[test_name], output)
def test_priority_order_up_ok(self):
"""
Tests the 'priority order' command in trac-admin. This particular
test passes a valid argument and checks for success.
"""
test_name = sys._getframe().f_code.co_name
self._execute('priority order critical up')
rv, output = self._execute('priority list')
self.assertEqual(0, rv)
self.assertEqual(self.expected_results[test_name], output)
def test_priority_order_error_bad_priority(self):
"""
Tests the 'priority order' command in trac-admin. This particular
test tries to reorder a priority that does not exist.
"""
test_name = sys._getframe().f_code.co_name
rv, output = self._execute('priority remove bad_priority')
self.assertEqual(2, rv)
self.assertEqual(self.expected_results[test_name], output)
# Severity tests
def test_severity_list_ok(self):
"""
Tests the 'severity list' command in trac-admin. Since this command
has no command arguments, it is hard to call it incorrectly. As
a result, there is only this one test.
"""
test_name = sys._getframe().f_code.co_name
rv, output = self._execute('severity list')
self.assertEqual(0, rv)
self.assertEqual(self.expected_results[test_name], output)
def test_severity_add_ok(self):
"""
Tests the 'severity add' command in trac-admin. This particular
test passes a valid argument and checks for success.
"""
test_name = sys._getframe().f_code.co_name
self._execute('severity add new_severity')
rv, output = self._execute('severity list')
self.assertEqual(0, rv)
self.assertEqual(self.expected_results[test_name], output)
def test_severity_add_error_already_exists(self):
"""
Tests the 'severity add' command in trac-admin. This particular
test passes a severity name that already exists and checks for an
error message.
"""
test_name = sys._getframe().f_code.co_name
self._execute('severity add blocker')
rv, output = self._execute('severity add blocker')
self.assertEqual(2, rv)
self.assertEqual(self.expected_results[test_name], output)
def test_severity_change_ok(self):
"""
Tests the 'severity add' command in trac-admin. This particular
test passes valid arguments and checks for success.
"""
test_name = sys._getframe().f_code.co_name
self._execute('severity add critical')
self._execute('severity change critical "end-of-the-world"')
rv, output = self._execute('severity list')
self.assertEqual(0, rv)
self.assertEqual(self.expected_results[test_name], output)
def test_severity_change_error_bad_severity(self):
"""
Tests the 'severity change' command in trac-admin. This particular
test tries to change a severity that does not exist.
"""
test_name = sys._getframe().f_code.co_name
rv, output = self._execute('severity change bad_severity changed_name')
self.assertEqual(2, rv)
self.assertEqual(self.expected_results[test_name], output)
def test_severity_change_error_bad_new_name(self):
"""
Tests the 'severity change' command in trac-admin. This particular
test tries to change a severity to a name that already exists.
"""
test_name = sys._getframe().f_code.co_name
self._execute('severity add major')
self._execute('severity add critical')
rv, output = self._execute('severity change critical major')
self.assertEqual(2, rv)
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | true |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/admin/tests/__init__.py | trac/trac/admin/tests/__init__.py | import unittest
from trac.admin.tests import console
from trac.admin.tests.functional import functionalSuite
def suite():
    """Return the test suite for the trac-admin console tests."""
    tests = unittest.TestSuite()
    tests.addTest(console.suite())
    return tests
if __name__ == '__main__':
unittest.main(defaultTest='suite')
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/wiki/web_api.py | trac/trac/wiki/web_api.py | # -*- coding: utf-8 -*-
#
# Copyright (C) 2009 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://trac.edgewall.org/wiki/TracLicense.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://trac.edgewall.org/log/.
from trac.core import *
from trac.resource import Resource
from trac.util import as_int
from trac.web.api import IRequestHandler
from trac.web.chrome import web_context
from trac.wiki.formatter import format_to
class WikiRenderer(Component):
    """Wiki text renderer.

    Exposes a `/wiki_render` endpoint that renders a snippet of wiki markup
    to HTML (e.g. for previews requested from the browser).
    """

    implements(IRequestHandler)

    # IRequestHandler methods

    def match_request(self, req):
        # Only claim the dedicated rendering endpoint.
        return req.path_info == '/wiki_render'

    def process_request(self, req):
        """Format the submitted wiki text and send the HTML as UTF-8."""
        # Allow all POST requests (with a valid __FORM_TOKEN, ensuring that
        # the client has at least some permission). Additionally, allow GET
        # requests from TRAC_ADMIN for testing purposes.
        if req.method != 'POST':
            req.perm.require('TRAC_ADMIN')

        # Resource (realm/id/version) the text is rendered in the context of.
        realm = req.args.get('realm', 'wiki')
        id = req.args.get('id')
        version = as_int(req.args.get('version'), None)
        text = req.args.get('text', '')
        flavor = req.args.get('flavor')
        options = {}
        # Optional formatter flags; '0'/'' are treated as false.
        if 'escape_newlines' in req.args:
            options['escape_newlines'] = bool(int(req.args['escape_newlines']
                                                  or 0))
        if 'shorten' in req.args:
            options['shorten'] = bool(int(req.args['shorten'] or 0))

        resource = Resource(realm, id=id, version=version)
        context = web_context(req, resource)
        rendered = format_to(self.env, flavor, context, text, **options)
        req.send(rendered.encode('utf-8'))
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/wiki/api.py | trac/trac/wiki/api.py | # -*- coding: utf-8 -*-
#
# Copyright (C) 2003-2009 Edgewall Software
# Copyright (C) 2003-2005 Jonas Borgström <jonas@edgewall.com>
# Copyright (C) 2004-2005 Christopher Lenz <cmlenz@gmx.de>
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://trac.edgewall.org/wiki/TracLicense.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://trac.edgewall.org/log/.
#
# Author: Jonas Borgström <jonas@edgewall.com>
# Christopher Lenz <cmlenz@gmx.de>
import re
from genshi.builder import tag
from trac.cache import cached
from trac.config import BoolOption, ListOption
from trac.core import *
from trac.resource import IResourceManager
from trac.util.text import unquote_label
from trac.util.translation import _
from .parser import WikiParser
class IWikiChangeListener(Interface):
"""Components that want to get notified about the creation,
deletion and modification of wiki pages should implement that
interface.
"""
def wiki_page_added(page):
"""Called whenever a new Wiki page is added."""
def wiki_page_changed(page, version, t, comment, author, ipnr):
"""Called when a page has been modified."""
def wiki_page_deleted(page):
"""Called when a page has been deleted."""
def wiki_page_version_deleted(page):
"""Called when a version of a page has been deleted."""
def wiki_page_renamed(page, old_name):
"""Called when a page has been renamed."""
class IWikiPageManipulator(Interface):
"""Components that need to do specific pre- and post- processing of
wiki page changes have to implement this interface.
Unlike change listeners, a manipulator can reject changes being
committed to the database.
"""
def prepare_wiki_page(req, page, fields):
"""Validate a wiki page before rendering it.
:param page: is the `WikiPage` being viewed.
:param fields: is a dictionary which contains the wiki `text`
of the page, initially identical to `page.text` but it can
eventually be transformed in place before being used as
input to the formatter.
"""
def validate_wiki_page(req, page):
"""Validate a wiki page after it's been populated from user input.
:param page: is the `WikiPage` being edited.
:return: a list of `(field, message)` tuples, one for each
problem detected. `field` can be `None` to indicate an
overall problem with the page. Therefore, a return value of
`[]` means everything is OK.
"""
class IWikiMacroProvider(Interface):
"""Augment the Wiki markup with new Wiki macros.
.. versionchanged :: 0.12
new Wiki processors can also be added that way.
"""
def get_macros():
"""Return an iterable that provides the names of the provided macros.
"""
def get_macro_description(name):
"""Return a tuple of a domain name to translate and plain text
description of the macro or only the description with the specified
name.
.. versionchanged :: 1.0
`get_macro_description` can return a domain to translate the
description.
"""
def render_macro(req, name, content):
"""Return the HTML output of the macro :deprecated:"""
def is_inline(content):
"""Return `True` if the content generated is an inline XHTML element.
.. versionadded :: 1.0
"""
def expand_macro(formatter, name, content, args=None):
"""Called by the formatter when rendering the parsed wiki text.
.. versionadded:: 0.11
This form is preferred over `render_macro`, as
you get the `formatter`, which knows the current `.context`
(and the `.req`, but ideally you shouldn't use it in your
macros).
.. versionchanged:: 0.12
added the `args` parameter
:param formatter: the wiki `Formatter` currently processing
the wiki markup
:param name: is the name by which the macro has been called;
remember that via `get_macros`, multiple names could be
associated to this macros. Note that the macro names are
case sensitive.
:param content: is the content of the macro call. When called
using macro syntax (`[[Macro(content)]]`), this is the
string contained between parentheses, usually containing
macro arguments. When called using wiki processor syntax
(`{{{!#Macro ...}}}`), it is the content of the processor
block, that is, the text starting on the line following the
macro name.
:param args: will be a dictionary containing the named
parameters passed when using the Wiki processor syntax.
The named parameters can be specified when calling the macro
using the wiki processor syntax::
{{{#!Macro arg1=value1 arg2="value 2"`
... some content ...
}}}
In this example, `args` will be
`{'arg1': 'value1', 'arg2': 'value 2'}`
and `content` will be `"... some content ..."`.
If no named parameters are given like in::
{{{#!Macro
...
}}}
then `args` will be `{}`. That makes it possible to
differentiate the above situation from a call
made using the macro syntax::
[[Macro(arg1=value1, arg2="value 2", ... some content...)]]
in which case `args` will always be `None`. Here `content`
will be the
`"arg1=value1, arg2="value 2", ... some content..."` string.
If like in this example, `content` is expected to contain
some arguments and named parameters, one can use the
`parse_args` function to conveniently extract them.
"""
class IWikiSyntaxProvider(Interface):
    """Enrich the Wiki syntax with new markup."""

    def get_wiki_syntax():
        """Return an iterable that provides additional wiki syntax.

        Additional wiki syntax correspond to a pair of `(regexp, cb)`,
        the `regexp` for the additional syntax and the callback `cb`
        which will be called if there's a match.
        That function is of the form `cb(formatter, ns, match)`.
        """

    def get_link_resolvers():
        """Return an iterable over `(namespace, formatter)` tuples.

        Each formatter should be a function of the form::

          def format(formatter, ns, target, label, fullmatch=None):
              pass

        and should return some HTML fragment.
        The `label` is already HTML escaped, whereas the `target` is
        not.  The `fullmatch` argument is optional, and is bound to
        the regexp match object for the link.
        """
def parse_args(args, strict=True):
    """Utility for parsing macro "content" and splitting them into arguments.

    The content is split along commas, unless they are escaped with a
    backquote (see example below).

    :param args: a string containing macros arguments
    :param strict: if `True`, only Python-like identifiers will be
                   recognized as keyword arguments

    Example usage::

      >>> parse_args('')
      ([], {})
      >>> parse_args('Some text')
      (['Some text'], {})
      >>> parse_args('Some text, mode= 3, some other arg\, with a comma.')
      (['Some text', ' some other arg, with a comma.'], {'mode': ' 3'})
      >>> parse_args('milestone=milestone1,status!=closed', strict=False)
      ([], {'status!': 'closed', 'milestone': 'milestone1'})
    """
    positional, keywords = [], {}
    if not args:
        return positional, keywords
    # In strict mode a keyword must look like a Python identifier;
    # otherwise anything up to the first '=' counts as the key.
    kw_re = r'\s*[a-zA-Z_]\w+=' if strict else r'\s*[^=]+='
    # Split on commas, except those escaped as '\,'
    for chunk in re.split(r'(?<!\\),', args):
        chunk = chunk.replace(r'\,', ',')
        match = re.match(kw_re, chunk)
        if match is None:
            positional.append(chunk)
        else:
            key = chunk[:match.end() - 1].strip()
            if strict:
                # Normalize keyword names to utf-8 byte strings (py2)
                key = unicode(key).encode('utf-8')
            keywords[key] = chunk[match.end():]
    return positional, keywords
def validate_page_name(pagename):
    """Utility for validating wiki page name.

    A valid name is non-empty and none of its slash-separated
    components is empty, `'.'` or `'..'`.

    :param pagename: wiki page name to validate
    """
    if not pagename:
        # preserve the falsy value itself ('' or None), as callers
        # may rely on it
        return pagename
    forbidden = ('', '.', '..')
    return not any(part in forbidden for part in pagename.split('/'))
class WikiSystem(Component):
    """Wiki system manager."""

    implements(IWikiSyntaxProvider, IResourceManager)

    # Extension points: plugins notified of page changes, providing
    # macros, or providing extra wiki syntax.
    change_listeners = ExtensionPoint(IWikiChangeListener)
    macro_providers = ExtensionPoint(IWikiMacroProvider)
    syntax_providers = ExtensionPoint(IWikiSyntaxProvider)

    ignore_missing_pages = BoolOption('wiki', 'ignore_missing_pages', 'false',
        """Enable/disable highlighting CamelCase links to missing pages
        (''since 0.9'').""")

    split_page_names = BoolOption('wiki', 'split_page_names', 'false',
        """Enable/disable splitting the WikiPageNames with space characters
        (''since 0.10'').""")

    render_unsafe_content = BoolOption('wiki', 'render_unsafe_content', 'false',
        """Enable/disable the use of unsafe HTML tags such as `<script>` or
        `<embed>` with the HTML [wiki:WikiProcessors WikiProcessor]
        (''since 0.10.4'').

        For public sites where anonymous users can edit the wiki it is
        recommended to leave this option disabled (which is the default).""")

    safe_schemes = ListOption('wiki', 'safe_schemes',
        'cvs, file, ftp, git, irc, http, https, news, sftp, smb, ssh, svn, '
        'svn+ssh',
        doc="""List of URI schemes considered "safe", that will be rendered as
        external links even if `[wiki] render_unsafe_content` is `false`.
        (''since 0.11.8'')""")

    @cached
    def pages(self):
        """Return the names of all existing wiki pages."""
        # cached: invalidated elsewhere when pages are added/removed
        return set(name for name, in
                   self.env.db_query("SELECT DISTINCT name FROM wiki"))

    # Public API

    def get_pages(self, prefix=None):
        """Iterate over the names of existing Wiki pages.

        :param prefix: if given, only names that start with that
                       prefix are included.
        """
        for page in self.pages:
            if not prefix or page.startswith(prefix):
                yield page

    def has_page(self, pagename):
        """Whether a page with the specified name exists."""
        # trailing '/' is not significant in a page name
        return pagename.rstrip('/') in self.pages

    # IWikiSyntaxProvider methods

    XML_NAME = r"[\w:](?<!\d)(?:[\w:.-]*[\w-])?"
    # See http://www.w3.org/TR/REC-xml/#id,
    # here adapted to exclude terminal "." and ":" characters

    # Splits CamelCase words: insert a space between a lower-case and
    # an upper-case letter followed by a lower-case one.
    PAGE_SPLIT_RE = re.compile(r"([a-z])([A-Z])(?=[a-z])")

    # All upper-case / lower-case characters of the BMP, used to build
    # the CamelCase wiki-word regexp below (py2 `unichr`).
    Lu = ''.join(unichr(c) for c in range(0, 0x10000) if unichr(c).isupper())
    Ll = ''.join(unichr(c) for c in range(0, 0x10000) if unichr(c).islower())

    def format_page_name(self, page, split=False):
        """Return the display form of a page name, optionally split
        on CamelCase boundaries (forced by `split` or by the
        `split_page_names` option)."""
        if split or self.split_page_names:
            return self.PAGE_SPLIT_RE.sub(r"\1 \2", page)
        return page

    def make_label_from_target(self, target):
        """Create a label from a wiki target.

        A trailing fragment and query string is stripped. Then, leading ./,
        ../ and / elements are stripped, except when this would lead to an
        empty label. Finally, if `split_page_names` is true, the label
        is split accordingly.
        """
        label = target.split('#', 1)[0].split('?', 1)[0]
        if not label:
            return target
        components = label.split('/')
        for i, comp in enumerate(components):
            if comp not in ('', '.', '..'):
                label = '/'.join(components[i:])
                break
        return self.format_page_name(label)

    def get_wiki_syntax(self):
        """Yield the `(regexp, callback)` pairs for wiki page links."""
        wiki_page_name = (
            r"(?:[%(upper)s](?:[%(lower)s])+/?){2,}"  # wiki words
            r"(?:@[0-9]+)?"                           # optional version
            r"(?:#%(xml)s)?"                          # optional fragment id
            r"(?=:(?:\Z|\s)|[^:\w%(upper)s%(lower)s]|\s|\Z)"
            # what should follow it
            % {'upper': self.Lu, 'lower': self.Ll, 'xml': self.XML_NAME})

        # Regular WikiPageNames
        def wikipagename_link(formatter, match, fullmatch):
            return self._format_link(formatter, 'wiki', match,
                                     self.format_page_name(match),
                                     self.ignore_missing_pages, match)

        # Start after any non-word char except '/', with optional relative or
        # absolute prefix
        yield (r"!?(?<![\w/])(?:\.?\.?/)*"
               + wiki_page_name, wikipagename_link)

        # [WikiPageNames with label]
        def wikipagename_with_label_link(formatter, match, fullmatch):
            page = fullmatch.group('wiki_page')
            label = fullmatch.group('wiki_label')
            return self._format_link(formatter, 'wiki', page, label.strip(),
                                     self.ignore_missing_pages, match)
        yield (r"!?\[(?P<wiki_page>%s)\s+(?P<wiki_label>%s|[^\]]+)\]"
               % (wiki_page_name, WikiParser.QUOTED_STRING),
               wikipagename_with_label_link)

        # MoinMoin's ["internal free link"] and ["free link" with label]
        def internal_free_link(fmt, m, fullmatch):
            page = fullmatch.group('ifl_page')[1:-1]
            label = fullmatch.group('ifl_label')
            if label is None:
                label = self.make_label_from_target(page)
            return self._format_link(fmt, 'wiki', page, label.strip(), False)
        yield (r"!?\[(?P<ifl_page>%s)(?:\s+(?P<ifl_label>%s|[^\]]+))?\]"
               % (WikiParser.QUOTED_STRING, WikiParser.QUOTED_STRING),
               internal_free_link)

    def get_link_resolvers(self):
        """Yield the resolver for the `wiki:` link namespace."""
        def link_resolver(formatter, ns, target, label, fullmatch=None):
            if fullmatch is not None:
                # If no explicit label was specified for a [wiki:...] link,
                # generate a "nice" label instead of keeping the label
                # generated by the Formatter (usually the target itself).
                groups = fullmatch.groupdict()
                if groups.get('lns') and not groups.get('label'):
                    label = self.make_label_from_target(target)
            return self._format_link(formatter, ns, target, label, False)
        yield ('wiki', link_resolver)

    def _format_link(self, formatter, ns, pagename, label, ignore_missing,
                     original_label=None):
        """Render a wiki link as an `<a>` element (or plain label).

        Splits off query/fragment/@version, resolves relative and
        scoped names against the referring wiki page, then renders
        according to page existence and the user's permissions.
        """
        pagename, query, fragment = formatter.split_link(pagename)
        version = None
        if '@' in pagename:
            pagename, version = pagename.split('@', 1)
        if version and query:
            # version is passed as the first query arg by href.wiki()
            query = '&' + query[1:]
        pagename = pagename.rstrip('/') or 'WikiStart'
        referrer = ''
        if formatter.resource and formatter.resource.realm == 'wiki':
            referrer = formatter.resource.id
        if pagename.startswith('/'):
            # absolute name
            pagename = pagename.lstrip('/')
        elif pagename.startswith(('./', '../')) or pagename in ('.', '..'):
            pagename = self._resolve_relative_name(pagename, referrer)
        else:
            pagename = self._resolve_scoped_name(pagename, referrer)
        label = unquote_label(label)
        if 'WIKI_VIEW' in formatter.perm('wiki', pagename, version):
            href = formatter.href.wiki(pagename, version=version) + query \
                   + fragment
            if self.has_page(pagename):
                return tag.a(label, href=href, class_='wiki')
            else:
                if ignore_missing:
                    return original_label or label
                if 'WIKI_CREATE' in formatter.perm('wiki', pagename, version):
                    # missing page the user could create: offer a link
                    return tag.a(label + '?', class_='missing wiki',
                                 href=href, rel='nofollow')
                else:
                    return tag.a(label + '?', class_='missing wiki')
        elif ignore_missing and not self.has_page(pagename):
            return original_label or label
        else:
            return tag.a(label, class_='forbidden wiki',
                         title=_("no permission to view this wiki page"))

    def _resolve_relative_name(self, pagename, referrer):
        """Resolve a `./`, `../`, `.` or `..` name against *referrer*."""
        base = referrer.split('/')
        components = pagename.split('/')
        for i, comp in enumerate(components):
            if comp == '..':
                if base:
                    base.pop()
            elif comp and comp != '.':
                # first real component: append the remainder and stop
                base.extend(components[i:])
                break
        return '/'.join(base)

    def _resolve_scoped_name(self, pagename, referrer):
        """Resolve a plain page name relative to a hierarchical referrer."""
        referrer = referrer.split('/')
        if len(referrer) == 1:  # Non-hierarchical referrer
            return pagename
        # Test for pages with same name, higher in the hierarchy
        for i in range(len(referrer) - 1, 0, -1):
            name = '/'.join(referrer[:i]) + '/' + pagename
            if self.has_page(name):
                return name
        if self.has_page(pagename):
            return pagename
        # If we are on First/Second/Third, and pagename is Second/Other,
        # resolve to First/Second/Other instead of First/Second/Second/Other
        # See http://trac.edgewall.org/ticket/4507#comment:12
        if '/' in pagename:
            (first, rest) = pagename.split('/', 1)
            for (i, part) in enumerate(referrer):
                if first == part:
                    anchor = '/'.join(referrer[:i + 1])
                    if self.has_page(anchor):
                        return anchor + '/' + rest
        # Assume the user wants a sibling of referrer
        return '/'.join(referrer[:-1]) + '/' + pagename

    # IResourceManager methods

    def get_resource_realms(self):
        yield 'wiki'

    def get_resource_description(self, resource, format, **kwargs):
        """
        >>> from trac.test import EnvironmentStub
        >>> from trac.resource import Resource, get_resource_description
        >>> env = EnvironmentStub()
        >>> main = Resource('wiki', 'WikiStart')
        >>> get_resource_description(env, main)
        'WikiStart'
        >>> get_resource_description(env, main(version=3))
        'WikiStart'
        >>> get_resource_description(env, main(version=3), format='summary')
        'WikiStart'
        >>> env.config['wiki'].set('split_page_names', 'true')
        >>> get_resource_description(env, main(version=3))
        'Wiki Start'
        """
        return self.format_page_name(resource.id)

    def resource_exists(self, resource):
        """
        >>> from trac.test import EnvironmentStub
        >>> from trac.resource import Resource, resource_exists
        >>> env = EnvironmentStub()
        >>> resource_exists(env, Resource('wiki', 'WikiStart'))
        False
        >>> from trac.wiki.model import WikiPage
        >>> main = WikiPage(env, 'WikiStart')
        >>> main.text = 'some content'
        >>> main.save('author', 'no comment', '::1')
        >>> resource_exists(env, main.resource)
        True
        """
        if resource.version is None:
            return resource.id in self.pages
        # a specific version was asked for: check the database directly
        return bool(self.env.db_query(
            "SELECT name FROM wiki WHERE name=%s AND version=%s",
            (resource.id, resource.version)))
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/wiki/web_ui.py | trac/trac/wiki/web_ui.py | # -*- coding: utf-8 -*-
#
# Copyright (C) 2003-2009 Edgewall Software
# Copyright (C) 2003-2005 Jonas Borgström <jonas@edgewall.com>
# Copyright (C) 2004-2005 Christopher Lenz <cmlenz@gmx.de>
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://trac.edgewall.org/wiki/TracLicense.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://trac.edgewall.org/log/.
#
# Author: Jonas Borgström <jonas@edgewall.com>
# Christopher Lenz <cmlenz@gmx.de>
from __future__ import with_statement
import pkg_resources
import re
from genshi.builder import tag
from trac.attachment import AttachmentModule
from trac.config import IntOption
from trac.core import *
from trac.mimeview.api import IContentConverter, Mimeview
from trac.perm import IPermissionRequestor
from trac.resource import *
from trac.search import ISearchSource, search_to_sql, shorten_result
from trac.timeline.api import ITimelineEventProvider
from trac.util import get_reporter_id
from trac.util.datefmt import from_utimestamp, to_utimestamp
from trac.util.text import shorten_line
from trac.util.translation import _, tag_
from trac.versioncontrol.diff import get_diff_options, diff_blocks
from trac.web.api import IRequestHandler
from trac.web.chrome import (Chrome, INavigationContributor, ITemplateProvider,
add_ctxtnav, add_link, add_notice, add_script,
add_stylesheet, add_warning, prevnext_nav,
web_context)
from trac.wiki.api import IWikiPageManipulator, WikiSystem, validate_page_name
from trac.wiki.formatter import format_to, OneLinerFormatter
from trac.wiki.model import WikiPage
class InvalidWikiPage(TracError):
    """Exception raised when a Wiki page fails validation.

    :deprecated: Not used anymore since 0.11
    """
class WikiModule(Component):
    """Web request handler for viewing, editing, renaming, deleting
    and diffing wiki pages."""

    implements(IContentConverter, INavigationContributor, IPermissionRequestor,
               IRequestHandler, ITimelineEventProvider, ISearchSource,
               ITemplateProvider)

    # Plugins allowed to validate / preprocess wiki page content
    page_manipulators = ExtensionPoint(IWikiPageManipulator)

    max_size = IntOption('wiki', 'max_size', 262144,
        """Maximum allowed wiki page size in bytes. (''since 0.11.2'')""")

    # Pages below this prefix are offered as templates for new pages
    PAGE_TEMPLATES_PREFIX = 'PageTemplates/'
    DEFAULT_PAGE_TEMPLATE = 'DefaultPage'

    # IContentConverter methods

    def get_supported_conversions(self):
        # key, title, extension, input mimetype, output mimetype, quality
        yield ('txt', _('Plain Text'), 'txt', 'text/x-trac-wiki', 'text/plain',
               9)

    def convert_content(self, req, mimetype, content, key):
        # wiki text converts to plain text as-is
        return (content, 'text/plain;charset=utf-8')

    # INavigationContributor methods

    def get_active_navigation_item(self, req):
        return 'wiki'

    def get_navigation_items(self, req):
        if 'WIKI_VIEW' in req.perm('wiki'):
            yield ('mainnav', 'wiki',
                   tag.a(_('Wiki'), href=req.href.wiki(), accesskey=1))
            yield ('metanav', 'help',
                   tag.a(_('Help/Guide'), href=req.href.wiki('TracGuide'),
                         accesskey=6))

    # IPermissionRequestor methods

    def get_permission_actions(self):
        # WIKI_ADMIN is a meta-permission implying all the others
        actions = ['WIKI_CREATE', 'WIKI_DELETE', 'WIKI_MODIFY', 'WIKI_RENAME',
                   'WIKI_VIEW']
        return actions + [('WIKI_ADMIN', actions)]

    # IRequestHandler methods

    def match_request(self, req):
        """Match `/wiki` and `/wiki/<pagename>` URLs."""
        match = re.match(r'/wiki(?:/(.+))?$', req.path_info)
        if match:
            if match.group(1):
                req.args['page'] = match.group(1)
            return 1

    def process_request(self, req):
        """Dispatch a wiki request according to the `action` argument."""
        action = req.args.get('action', 'view')
        pagename = req.args.get('page', 'WikiStart')
        version = req.args.get('version')
        old_version = req.args.get('old_version')

        # Normalize names with leading/trailing or doubled slashes by
        # redirecting to the canonical URL
        if pagename.startswith('/') or pagename.endswith('/') or \
                '//' in pagename:
            pagename = re.sub(r'/{2,}', '/', pagename.strip('/'))
            req.redirect(req.href.wiki(pagename))
        if not validate_page_name(pagename):
            raise TracError(_("Invalid Wiki page name '%(name)s'",
                              name=pagename))

        page = WikiPage(self.env, pagename)
        versioned_page = WikiPage(self.env, pagename, version=version)

        req.perm(page.resource).require('WIKI_VIEW')
        req.perm(versioned_page.resource).require('WIKI_VIEW')

        if version and versioned_page.version != int(version):
            raise ResourceNotFound(
                _('No version "%(num)s" for Wiki page "%(name)s"',
                  num=version, name=page.name))

        add_stylesheet(req, 'common/css/wiki.css')

        if req.method == 'POST':
            if action == 'edit':
                if 'cancel' in req.args:
                    req.redirect(req.href.wiki(page.name))

                # another edit was saved since this edit started
                has_collision = int(version) != page.version
                for a in ('preview', 'diff', 'merge'):
                    if a in req.args:
                        action = a
                        break
                versioned_page.text = req.args.get('text')
                valid = self._validate(req, versioned_page)
                if action == 'edit' and not has_collision and valid:
                    return self._do_save(req, versioned_page)
                else:
                    return self._render_editor(req, page, action,
                                               has_collision)
            elif action == 'delete':
                self._do_delete(req, versioned_page)
            elif action == 'rename':
                return self._do_rename(req, page)
            elif action == 'diff':
                style, options, diff_data = get_diff_options(req)
                contextall = diff_data['options']['contextall']
                req.redirect(req.href.wiki(versioned_page.name, action='diff',
                                           old_version=old_version,
                                           version=version,
                                           contextall=contextall or None))
        elif action == 'delete':
            return self._render_confirm_delete(req, page)
        elif action == 'rename':
            return self._render_confirm_rename(req, page)
        elif action == 'edit':
            return self._render_editor(req, page)
        elif action == 'diff':
            return self._render_diff(req, versioned_page)
        elif action == 'history':
            return self._render_history(req, versioned_page)
        else:
            format = req.args.get('format')
            if format:
                # alternate format requested (e.g. ?format=txt)
                Mimeview(self.env).send_converted(req, 'text/x-trac-wiki',
                                                  versioned_page.text,
                                                  format, versioned_page.name)
            return self._render_view(req, versioned_page)
    # ITemplateProvider methods

    def get_htdocs_dirs(self):
        return []

    def get_templates_dirs(self):
        return [pkg_resources.resource_filename('trac.wiki', 'templates')]

    # Internal methods

    def _validate(self, req, page):
        """Check the submitted page; add warnings and return validity."""
        valid = True

        # Validate page size
        if len(req.args.get('text', '')) > self.max_size:
            add_warning(req, _('The wiki page is too long (must be less '
                               'than %(num)s characters)',
                               num=self.max_size))
            valid = False

        # Give the manipulators a pass at post-processing the page
        for manipulator in self.page_manipulators:
            for field, message in manipulator.validate_wiki_page(req, page):
                valid = False
                if field:
                    add_warning(req, _("The Wiki page field '%(field)s' is "
                                       "invalid: %(message)s",
                                       field=field, message=message))
                else:
                    add_warning(req, _("Invalid Wiki page: %(message)s",
                                       message=message))
        return valid
def _page_data(self, req, page, action=''):
title = get_resource_summary(self.env, page.resource)
if action:
title += ' (%s)' % action
return {'page': page, 'action': action, 'title': title}
    def _prepare_diff(self, req, page, old_text, new_text,
                      old_version, new_version):
        """Compute diff blocks and template data between two text versions.

        Returns a `(diff_data, changes)` pair for the diff templates.
        """
        diff_style, diff_options, diff_data = get_diff_options(req)
        # number of context lines; '-U<n>' option overrides the default
        diff_context = 3
        for option in diff_options:
            if option.startswith('-U'):
                diff_context = int(option[2:])
                break
        if diff_context < 0:
            # negative means "show everything"
            diff_context = None
        diffs = diff_blocks(old_text, new_text, context=diff_context,
                            ignore_blank_lines='-B' in diff_options,
                            ignore_case='-i' in diff_options,
                            ignore_space_changes='-b' in diff_options)
        def version_info(v, last=0):
            return {'path': get_resource_name(self.env, page.resource),
                    # TRANSLATOR: wiki page
                    'rev': v or _('currently edited'),
                    'shortrev': v or last + 1,
                    'href': req.href.wiki(page.name, version=v)
                            if v else None}
        changes = [{'diffs': diffs, 'props': [],
                    'new': version_info(new_version, old_version),
                    'old': version_info(old_version)}]

        add_stylesheet(req, 'common/css/diff.css')
        add_script(req, 'common/js/diff.js')
        return diff_data, changes
    def _do_delete(self, req, page):
        """Delete a page, a single version, or a range of versions,
        then redirect with a notice."""
        if page.readonly:
            req.perm(page.resource).require('WIKI_ADMIN')
        else:
            req.perm(page.resource).require('WIKI_DELETE')

        if 'cancel' in req.args:
            req.redirect(get_resource_url(self.env, page.resource, req.href))

        version = int(req.args.get('version', 0)) or None
        old_version = int(req.args.get('old_version', 0)) or version

        with self.env.db_transaction as db:
            if version and old_version and version > old_version:
                # delete from `old_version` exclusive to `version` inclusive:
                for v in range(old_version, version):
                    page.delete(v + 1, db)
            else:
                # only delete that `version`, or the whole page if `None`
                page.delete(version, db)

        if not page.exists:
            add_notice(req, _("The page %(name)s has been deleted.",
                              name=page.name))
            req.redirect(req.href.wiki())
        else:
            if version and old_version and version > old_version + 1:
                add_notice(req, _('The versions %(from_)d to %(to)d of the '
                                  'page %(name)s have been deleted.',
                                  from_=old_version + 1, to=version,
                                  name=page.name))
            else:
                add_notice(req, _('The version %(version)d of the page '
                                  '%(name)s has been deleted.',
                                  version=version, name=page.name))
            req.redirect(req.href.wiki(page.name))

    def _do_rename(self, req, page):
        """Rename a page, optionally leaving a redirection page behind."""
        if page.readonly:
            req.perm(page.resource).require('WIKI_ADMIN')
        else:
            req.perm(page.resource).require('WIKI_RENAME')

        if 'cancel' in req.args:
            req.redirect(get_resource_url(self.env, page.resource, req.href))

        old_name, old_version = page.name, page.version
        new_name = req.args.get('new_name', '')
        new_name = re.sub(r'/{2,}', '/', new_name.strip('/'))
        redirect = req.args.get('redirect')

        # verify input parameters
        warn = None
        if not new_name:
            warn = _("A new name is mandatory for a rename.")
        elif not validate_page_name(new_name):
            warn = _("The new name is invalid (a name which is separated "
                     "with slashes cannot be '.' or '..').")
        elif new_name == old_name:
            warn = _("The new name must be different from the old name.")
        elif WikiPage(self.env, new_name).exists:
            warn = _("The page %(name)s already exists.", name=new_name)
        if warn:
            add_warning(req, warn)
            return self._render_confirm_rename(req, page, new_name)

        with self.env.db_transaction as db:
            page.rename(new_name)
            if redirect:
                # leave a pointer page at the old location
                redirection = WikiPage(self.env, old_name, db=db)
                redirection.text = _('See [wiki:"%(name)s"].', name=new_name)
                author = get_reporter_id(req)
                comment = u'[wiki:"%s@%d" %s] \u2192 [wiki:"%s"].' % (
                    new_name, old_version, old_name, new_name)
                redirection.save(author, comment, req.remote_addr)

        req.redirect(req.href.wiki(old_name if redirect else new_name))

    def _do_save(self, req, page):
        """Save a new page version and redirect to the page view."""
        if page.readonly:
            req.perm(page.resource).require('WIKI_ADMIN')
        elif not page.exists:
            req.perm(page.resource).require('WIKI_CREATE')
        else:
            req.perm(page.resource).require('WIKI_MODIFY')

        if 'WIKI_ADMIN' in req.perm(page.resource):
            # Modify the read-only flag if it has been changed and the user is
            # WIKI_ADMIN
            page.readonly = int('readonly' in req.args)

        try:
            page.save(get_reporter_id(req, 'author'), req.args.get('comment'),
                      req.remote_addr)
            add_notice(req, _("Your changes have been saved in version "
                              "%(version)s.", version=page.version))
            req.redirect(get_resource_url(self.env, page.resource, req.href,
                                          version=None))
        except TracError:
            # save failed (e.g. no actual change): show latest version
            add_warning(req, _("Page not modified, showing latest version."))
            return self._render_view(req, page)
    def _render_confirm_delete(self, req, page):
        """Render the confirmation page for deleting a page or versions."""
        if page.readonly:
            req.perm(page.resource).require('WIKI_ADMIN')
        else:
            req.perm(page.resource).require('WIKI_DELETE')

        version = None
        if 'delete_version' in req.args:
            version = int(req.args.get('version', 0))
        old_version = int(req.args.get('old_version') or 0) or version

        # what is being deleted: a version range, one version, or the page
        what = 'multiple' if version and old_version \
                             and version - old_version > 1 \
               else 'single' if version else 'page'

        # collect the dates bracketing the deletion for display
        num_versions = 0
        new_date = None
        old_date = None
        for v, t, author, comment, ipnr in page.get_history():
            if (v <= version or what == 'page') and new_date is None:
                new_date = t
            if (v <= old_version and what == 'multiple' or
                    num_versions > 1 and what == 'single'):
                break
            num_versions += 1
            old_date = t

        data = self._page_data(req, page, 'delete')
        data.update({'what': what, 'new_version': None, 'old_version': None,
                     'num_versions': num_versions, 'new_date': new_date,
                     'old_date': old_date})
        if version is not None:
            data.update({'new_version': version, 'old_version': old_version})
        self._wiki_ctxtnav(req, page)
        return 'wiki_delete.html', data, None

    def _render_confirm_rename(self, req, page, new_name=None):
        """Render the confirmation page for renaming a page."""
        if page.readonly:
            req.perm(page.resource).require('WIKI_ADMIN')
        else:
            req.perm(page.resource).require('WIKI_RENAME')

        data = self._page_data(req, page, 'rename')
        data['new_name'] = new_name if new_name is not None else page.name
        self._wiki_ctxtnav(req, page)
        return 'wiki_rename.html', data, None
    def _render_diff(self, req, page):
        """Render the diff between two versions of a page."""
        if not page.exists:
            raise TracError(_('Version %(num)s of page "%(name)s" does not '
                              'exist',
                              num=req.args.get('version'), name=page.name))

        old_version = req.args.get('old_version')
        if old_version:
            old_version = int(old_version)
            if old_version == page.version:
                old_version = None
            elif old_version > page.version:
                # FIXME: what about reverse diffs?
                old_version = page.resource.version
                page = WikiPage(self.env, page.name, version=old_version)
                req.perm(page.resource).require('WIKI_VIEW')

        latest_page = WikiPage(self.env, page.name, version=None)
        req.perm(latest_page.resource).require('WIKI_VIEW')
        new_version = int(page.version)

        # walk the history once, collecting metadata for the new version
        # and finding the old/prev/next versions around it
        date = author = comment = ipnr = None
        num_changes = 0
        prev_version = next_version = None
        for version, t, a, c, i in latest_page.get_history():
            if version == new_version:
                date = t
                author = a or 'anonymous'
                comment = c or '--'
                ipnr = i or ''
            else:
                if version < new_version:
                    num_changes += 1
                    if not prev_version:
                        prev_version = version
                    if old_version is None or version == old_version:
                        old_version = version
                        break
                else:
                    next_version = version
        if not old_version:
            old_version = 0
        old_page = WikiPage(self.env, page.name, old_version)
        req.perm(old_page.resource).require('WIKI_VIEW')

        # -- text diffs
        old_text = old_page.text.splitlines()
        new_text = page.text.splitlines()
        diff_data, changes = self._prepare_diff(req, page, old_text, new_text,
                                                old_version, new_version)

        # -- prev/up/next links
        if prev_version:
            add_link(req, 'prev', req.href.wiki(page.name, action='diff',
                                                version=prev_version),
                     _('Version %(num)s', num=prev_version))
        add_link(req, 'up', req.href.wiki(page.name, action='history'),
                 _('Page history'))
        if next_version:
            add_link(req, 'next', req.href.wiki(page.name, action='diff',
                                                version=next_version),
                     _('Version %(num)s', num=next_version))

        data = self._page_data(req, page, 'diff')
        data.update({
            'change': {'date': date, 'author': author, 'ipnr': ipnr,
                       'comment': comment},
            'new_version': new_version, 'old_version': old_version,
            'latest_version': latest_page.version,
            'num_changes': num_changes,
            'longcol': 'Version', 'shortcol': 'v',
            'changes': changes,
            'diff': diff_data,
        })
        prevnext_nav(req, _('Previous Change'), _('Next Change'),
                     _('Wiki History'))
        return 'wiki_diff.html', data, None
    def _render_editor(self, req, page, action='edit', has_collision=False):
        """Render the edit form, optionally with preview, diff or
        merge/collision handling."""
        if has_collision:
            if action == 'merge':
                # merge against the latest version
                page = WikiPage(self.env, page.name, version=None)
                req.perm(page.resource).require('WIKI_VIEW')
            else:
                action = 'collision'

        if page.readonly:
            req.perm(page.resource).require('WIKI_ADMIN')
        else:
            req.perm(page.resource).require('WIKI_MODIFY')
        original_text = page.text
        comment = req.args.get('comment', '')
        if 'text' in req.args:
            page.text = req.args.get('text')
        elif 'template' in req.args:
            # start a new page from a PageTemplates/* page
            template = self.PAGE_TEMPLATES_PREFIX + req.args.get('template')
            template_page = WikiPage(self.env, template)
            if template_page and template_page.exists and \
                    'WIKI_VIEW' in req.perm(template_page.resource):
                page.text = template_page.text
        elif 'version' in req.args:
            # editing starts from an older version (revert)
            old_page = WikiPage(self.env, page.name,
                                version=int(req.args['version']))
            req.perm(page.resource).require('WIKI_VIEW')
            page.text = old_page.text
            comment = _("Reverted to version %(version)s.",
                        version=req.args['version'])
        if action in ('preview', 'diff'):
            page.readonly = 'readonly' in req.args

        author = get_reporter_id(req, 'author')
        defaults = {'editrows': 20}
        # per-user editor preferences stored in the session
        prefs = dict((key, req.session.get('wiki_%s' % key, defaults.get(key)))
                     for key in ('editrows', 'sidebyside'))

        if 'from_editor' in req.args:
            sidebyside = req.args.get('sidebyside') or None
            if sidebyside != prefs['sidebyside']:
                req.session.set('wiki_sidebyside', int(bool(sidebyside)), 0)
        else:
            sidebyside = prefs['sidebyside']

        if sidebyside:
            # side-by-side editor needs enough rows to show the full text
            editrows = max(int(prefs['editrows']),
                           len(page.text.splitlines()) + 1)
        else:
            editrows = req.args.get('editrows')
            if editrows:
                if editrows != prefs['editrows']:
                    req.session.set('wiki_editrows', editrows,
                                    defaults['editrows'])
            else:
                editrows = prefs['editrows']

        data = self._page_data(req, page, action)
        context = web_context(req, page.resource)
        data.update({
            'author': author,
            'comment': comment,
            'edit_rows': editrows, 'sidebyside': sidebyside,
            'scroll_bar_pos': req.args.get('scroll_bar_pos', ''),
            'diff': None,
            'attachments': AttachmentModule(self.env).attachment_data(context),
        })
        if action in ('diff', 'merge'):
            old_text = original_text.splitlines() if original_text else []
            new_text = page.text.splitlines() if page.text else []
            diff_data, changes = self._prepare_diff(
                req, page, old_text, new_text, page.version, '')
            data.update({'diff': diff_data, 'changes': changes,
                         'action': 'preview', 'merge': action == 'merge',
                         'longcol': 'Version', 'shortcol': 'v'})
        elif sidebyside and action != 'collision':
            data['action'] = 'preview'

        self._wiki_ctxtnav(req, page)
        Chrome(self.env).add_wiki_toolbars(req)
        Chrome(self.env).add_auto_preview(req)
        add_script(req, 'common/js/folding.js')
        return 'wiki_edit.html', data, None
def _render_history(self, req, page):
"""Extract the complete history for a given page.
This information is used to present a changelog/history for a given
page.
"""
if not page.exists:
raise TracError(_("Page %(name)s does not exist", name=page.name))
data = self._page_data(req, page, 'history')
history = []
for version, date, author, comment, ipnr in page.get_history():
history.append({
'version': version,
'date': date,
'author': author,
'comment': comment,
'ipnr': ipnr
})
data.update({'history': history, 'resource': page.resource})
add_ctxtnav(req, _("Back to %(wikipage)s", wikipage=page.name),
req.href.wiki(page.name))
return 'history_view.html', data, None
    def _render_view(self, req, page):
        """Render the normal wiki page view (possibly of an old version,
        or the "page not found" helper view)."""
        version = page.resource.version

        # Add registered converters
        if page.exists:
            for conversion in Mimeview(self.env).get_supported_conversions(
                                                 'text/x-trac-wiki'):
                conversion_href = req.href.wiki(page.name, version=version,
                                                format=conversion[0])
                # or...
                # NOTE(review): the assignment above is dead — it is
                # immediately overwritten by the get_resource_url() form
                # below; one of the two should probably be removed.
                conversion_href = get_resource_url(self.env, page.resource,
                                                   req.href,
                                                   format=conversion[0])
                add_link(req, 'alternate', conversion_href, conversion[1],
                         conversion[3])

        data = self._page_data(req, page)
        if page.name == 'WikiStart':
            data['title'] = ''

        ws = WikiSystem(self.env)
        context = web_context(req, page.resource)
        higher, related = [], []
        if not page.exists:
            if 'WIKI_CREATE' not in req.perm(page.resource):
                raise ResourceNotFound(_('Page %(name)s not found',
                                         name=page.name))
            formatter = OneLinerFormatter(self.env, context)
            if '/' in page.name:
                # suggest existing ancestors sharing the last component
                parts = page.name.split('/')
                for i in range(len(parts) - 2, -1, -1):
                    name = '/'.join(parts[:i] + [parts[-1]])
                    if not ws.has_page(name):
                        higher.append(ws._format_link(formatter, 'wiki',
                                                      '/' + name, name,
                                                      False))
            else:
                name = page.name
            # suggest viewable pages whose name contains the missing one
            name = name.lower()
            related = [each for each in ws.pages
                       if name in each.lower()
                          and 'WIKI_VIEW' in req.perm('wiki', each)]
            related.sort()
            related = [ws._format_link(formatter, 'wiki', '/' + each, each,
                                       False)
                       for each in related]

        latest_page = WikiPage(self.env, page.name, version=None)
        req.perm(latest_page.resource).require('WIKI_VIEW')

        # locate the versions surrounding the displayed one
        prev_version = next_version = None
        if version:
            try:
                version = int(version)
                for hist in latest_page.get_history():
                    v = hist[0]
                    if v != version:
                        if v < version:
                            if not prev_version:
                                prev_version = v
                                break
                        else:
                            next_version = v
            except ValueError:
                version = None

        prefix = self.PAGE_TEMPLATES_PREFIX
        templates = [template[len(prefix):]
                     for template in ws.get_pages(prefix)
                     if 'WIKI_VIEW' in req.perm('wiki', template)]

        # -- prev/up/next links
        if prev_version:
            add_link(req, 'prev',
                     req.href.wiki(page.name, version=prev_version),
                     _('Version %(num)s', num=prev_version))

        parent = None
        if version:
            add_link(req, 'up', req.href.wiki(page.name, version=None),
                     _('View latest version'))
        elif '/' in page.name:
            parent = page.name[:page.name.rindex('/')]
            add_link(req, 'up', req.href.wiki(parent, version=None),
                     _("View parent page"))

        if next_version:
            add_link(req, 'next',
                     req.href.wiki(page.name, version=next_version),
                     _('Version %(num)s', num=next_version))

        # Add ctxtnav entries
        if version:
            prevnext_nav(req, _('Previous Version'), _('Next Version'),
                         _('View Latest Version'))
        else:
            if parent:
                add_ctxtnav(req, _('Up'), req.href.wiki(parent))
            self._wiki_ctxtnav(req, page)

        # Plugin content validation
        fields = {'text': page.text}
        for manipulator in self.page_manipulators:
            manipulator.prepare_wiki_page(req, page, fields)
        text = fields.get('text', '')

        data.update({
            'context': context,
            'text': text,
            'latest_version': latest_page.version,
            'attachments': AttachmentModule(self.env).attachment_data(context),
            'default_template': self.DEFAULT_PAGE_TEMPLATE,
            'templates': templates,
            'version': version,
            'higher': higher, 'related': related,
            'resourcepath_template': 'wiki_page_path.html',
        })
        add_script(req, 'common/js/folding.js')
        return 'wiki_view.html', data, None
def _wiki_ctxtnav(self, req, page):
"""Add the normal wiki ctxtnav entries."""
add_ctxtnav(req, _('Start Page'), req.href.wiki('WikiStart'))
add_ctxtnav(req, _('Index'), req.href.wiki('TitleIndex'))
if page.exists:
add_ctxtnav(req, _('History'), req.href.wiki(page.name,
action='history'))
# ITimelineEventProvider methods
def get_timeline_filters(self, req):
if 'WIKI_VIEW' in req.perm:
yield ('wiki', _('Wiki changes'))
    def get_timeline_events(self, req, start, stop, filters):
        """Yield wiki edits (and wiki attachment events) between `start`
        and `stop` as timeline events.

        Each wiki event is a `('wiki', datetime, author, (resource,
        comment))` tuple; versions the user may not view are skipped.
        """
        if 'wiki' in filters:
            wiki_realm = Resource('wiki')
            for ts, name, comment, author, version in self.env.db_query("""
                    SELECT time, name, comment, author, version FROM wiki
                    WHERE time>=%s AND time<=%s
                    """, (to_utimestamp(start), to_utimestamp(stop))):
                wiki_page = wiki_realm(id=name, version=version)
                # Permission is checked per page *version*
                if 'WIKI_VIEW' not in req.perm(wiki_page):
                    continue
                yield ('wiki', from_utimestamp(ts), author,
                       (wiki_page, comment))

            # Attachments
            for event in AttachmentModule(self.env).get_timeline_events(
                    req, wiki_realm, start, stop):
                yield event
def render_timeline_event(self, context, field, event):
wiki_page, comment = event[3]
if field == 'url':
return context.href.wiki(wiki_page.id, version=wiki_page.version)
elif field == 'title':
name = tag.em(get_resource_name(self.env, wiki_page))
if wiki_page.version > 1:
return tag_('%(page)s edited', page=name)
else:
return tag_('%(page)s created', page=name)
elif field == 'description':
markup = format_to(self.env, None,
context.child(resource=wiki_page), comment)
if wiki_page.version > 1:
diff_href = context.href.wiki(
wiki_page.id, version=wiki_page.version, action='diff')
markup = tag(markup,
' (', tag.a(_('diff'), href=diff_href), ')')
return markup
# ISearchSource methods
def get_search_filters(self, req):
if 'WIKI_VIEW' in req.perm:
yield ('wiki', _('Wiki'))
def get_search_results(self, req, terms, filters):
if not 'wiki' in filters:
return
with self.env.db_query as db:
sql_query, args = search_to_sql(db, ['w1.name', 'w1.author',
'w1.text'], terms)
wiki_realm = Resource('wiki')
for name, ts, author, text in db("""
SELECT w1.name, w1.time, w1.author, w1.text
FROM wiki w1,(SELECT name, max(version) AS ver
FROM wiki GROUP BY name) w2
WHERE w1.version = w2.ver AND w1.name = w2.name
AND """ + sql_query, args):
page = wiki_realm(id=name)
if 'WIKI_VIEW' in req.perm(page):
yield (get_resource_url(self.env, page, req.href),
'%s: %s' % (name, shorten_line(text)),
from_utimestamp(ts), author,
shorten_result(text, terms))
# Attachments
for result in AttachmentModule(self.env).get_search_results(
req, wiki_realm, terms):
yield result
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/wiki/macros.py | trac/trac/wiki/macros.py | # -*- coding: utf-8 -*-
#
# Copyright (C) 2005-2012 Edgewall Software
# Copyright (C) 2005-2006 Christopher Lenz <cmlenz@gmx.de>
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://trac.edgewall.org/wiki/TracLicense.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://trac.edgewall.org/log/.
#
# Author: Christopher Lenz <cmlenz@gmx.de>
from fnmatch import fnmatchcase
from itertools import groupby
import inspect
import os
import re
from StringIO import StringIO
from genshi.builder import tag
from genshi.core import Markup
from trac.core import *
from trac.resource import (
Resource, ResourceNotFound, get_resource_name, get_resource_summary,
get_resource_url
)
from trac.util.datefmt import format_date, from_utimestamp, user_time
from trac.util.html import escape, find_element
from trac.util.presentation import separated
from trac.util.text import unicode_quote, to_unicode, stripws
from trac.util.translation import _, dgettext, cleandoc_
from trac.wiki.api import IWikiMacroProvider, WikiSystem, parse_args
from trac.wiki.formatter import (
format_to_html, format_to_oneliner, extract_link, OutlineFormatter
)
class WikiMacroBase(Component):
    """Abstract base class for wiki macros."""

    implements(IWikiMacroProvider)
    abstract = True

    #: A gettext domain to translate the macro description
    _domain = None

    #: A macro description
    _description = None

    def get_macros(self):
        """Yield the name of the macro based on the class name."""
        name = self.__class__.__name__
        if name.endswith('Macro'):
            name = name[:-5]
        yield name

    def get_macro_description(self, name):
        """Return the subclass's gettext domain and macro description"""
        domain, description = self._domain, self._description
        if description:
            return (domain, description) if domain else description
        # For pre-0.12 compatibility
        doc = inspect.getdoc(self.__class__)
        return to_unicode(doc) if doc else ''

    def parse_macro(self, parser, name, content):
        """Subclasses producing WikiDOM nodes must override this."""
        raise NotImplementedError

    def expand_macro(self, formatter, name, content):
        """Subclasses must override this to render the macro output."""
        raise NotImplementedError(
            "pre-0.11 Wiki macro %s by provider %s no longer supported" %
            (name, self.__class__))
class TitleIndexMacro(WikiMacroBase):
    _domain = 'messages'
    _description = cleandoc_(
    """Insert an alphabetic list of all wiki pages into the output.

    Accepts a prefix string as parameter: if provided, only pages with names
    that start with the prefix are included in the resulting list. If this
    parameter is omitted, all pages are listed.
    If the prefix is specified, a second argument of value `hideprefix`
    can be given as well, in order to remove that prefix from the output.

    Alternate `format` and `depth` named parameters can be specified:
     - `format=compact`: The pages are displayed as comma-separated links.
     - `format=group`: The list of pages will be structured in groups
       according to common prefix. This format also supports a `min=n`
       argument, where `n` is the minimal number of pages for a group.
     - `format=hierarchy`: The list of pages will be structured according
       to the page name path hierarchy. This format also supports a `min=n`
       argument, where higher `n` flatten the display hierarchy
     - `depth=n`: limit the depth of the pages to list. If set to 0,
       only toplevel pages will be shown, if set to 1, only immediate
       children pages will be shown, etc. If not set, or set to -1,
       all pages in the hierarchy will be shown.
     - `include=page1:page*2`: include only pages that match an item in the
       colon-separated list of pages. If the list is empty, or if no `include`
       argument is given, include all pages.
     - `exclude=page1:page*2`: exclude pages that match an item in the colon-
       separated list of pages.

    The `include` and `exclude` lists accept shell-style patterns.
    """)

    # Split on '/' and spaces (used for grouping and key detection)
    SPLIT_RE = re.compile(r"(/| )")
    NUM_SPLIT_RE = re.compile(r"([0-9.]+)")

    def expand_macro(self, formatter, name, content):
        """Render the page index; see the class description for options."""
        args, kw = parse_args(content)
        prefix = args[0].strip() if args else None
        hideprefix = args and len(args) > 1 and args[1].strip() == 'hideprefix'
        minsize = max(int(kw.get('min', 1)), 1)
        minsize_group = max(minsize, 2)
        depth = int(kw.get('depth', -1))
        # depth is counted relative to the prefix's own '/' count
        start = prefix.count('/') if prefix else 0
        format = kw.get('format', '')

        def parse_list(name):
            return [inc.strip() for inc in kw.get(name, '').split(':')
                    if inc.strip()]
        includes = parse_list('include') or ['*']
        excludes = parse_list('exclude')

        if hideprefix:
            omitprefix = lambda page: page[len(prefix):]
        else:
            omitprefix = lambda page: page

        wiki = formatter.wiki

        # Pages surviving depth, permission and include/exclude filtering
        pages = sorted(page for page in wiki.get_pages(prefix)
                       if (depth < 0 or depth >= page.count('/') - start)
                       and 'WIKI_VIEW' in formatter.perm('wiki', page)
                       and any(fnmatchcase(page, inc) for inc in includes)
                       and not any(fnmatchcase(page, exc)
                                   for exc in excludes))

        if format == 'compact':
            return tag(
                separated((tag.a(wiki.format_page_name(omitprefix(p)),
                                 href=formatter.href.wiki(p))
                           for p in pages), ', '))

        # the function definitions for the different format styles

        # the different page split formats, each corresponding to its
        # rendering
        def split_pages_group(pages):
            """Return a list of (path elements, page_name) pairs,
            where path elements correspond to the page name (without prefix)
            splitted at Camel Case word boundaries, numbers and '/'.
            """
            page_paths = []
            for page in pages:
                path = [elt.strip() for elt in self.SPLIT_RE.split(
                        self.NUM_SPLIT_RE.sub(r" \1 ",
                        wiki.format_page_name(omitprefix(page), split=True)))]
                page_paths.append(([elt for elt in path if elt], page))
            return page_paths

        def split_pages_hierarchy(pages):
            """Return a list of (path elements, page_name) pairs,
            where path elements correspond to the page name (without prefix)
            splitted according to the '/' hierarchy.
            """
            return [(wiki.format_page_name(omitprefix(page)).split("/"), page)
                    for page in pages]

        # the different tree structures, each corresponding to its rendering
        def tree_group(entries):
            """Transform a flat list of entries into a tree structure.

            `entries` is a list of `(path_elements, page_name)` pairs

            Return a list organized in a tree structure, in which:
              - a leaf is a page name
              - a node is a `(key, nodes)` pairs, where:
                - `key` is the leftmost of the path elements, common to the
                  grouped (path element, page_name) entries
                - `nodes` is a list of nodes or leaves
            """
            groups = []
            # NOTE: py2 tuple-parameter lambda; groupby requires the
            # entries to be sorted by their first path element
            for key, grouper in groupby(entries, lambda (elts, name):
                                        elts[0] if elts else ''):
                # remove key from path_elements in grouped entries for
                # further grouping
                grouped_entries = [(path_elements[1:], page_name)
                                   for path_elements, page_name in grouper]

                if key and len(grouped_entries) >= minsize_group:
                    subnodes = tree_group(sorted(grouped_entries))
                    if len(subnodes) == 1:
                        # fold single-child nodes into their parent key
                        subkey, subnodes = subnodes[0]
                        node = (key + subkey, subnodes)
                        groups.append(node)
                    elif self.SPLIT_RE.match(key):
                        # separator keys ('/', ' ') are merged into children
                        for elt in subnodes:
                            if isinstance(elt, tuple):
                                subkey, subnodes = elt
                                elt = (key + subkey, subnodes)
                            groups.append(elt)
                    else:
                        node = (key, subnodes)
                        groups.append(node)
                else:
                    # group too small: emit the page names as plain leaves
                    for path_elements, page_name in grouped_entries:
                        groups.append(page_name)
            return groups

        def tree_hierarchy(entries):
            """Transform a flat list of entries into a tree structure.

            `entries` is a list of `(path_elements, page_name)` pairs

            Return a list organized in a tree structure, in which:
              - a leaf is a `(rest, page)` pair, where:
                - `rest` is the rest of the path to be shown
                - `page` is a page name
              - a node is a `(key, nodes, page)` pair, where:
                - `key` is the leftmost of the path elements, common to the
                  grouped (path element, page_name) entries
                - `page` is a page name (if one exists for that node)
                - `nodes` is a list of nodes or leaves
            """
            groups = []
            for key, grouper in groupby(entries, lambda (elts, name):
                                        elts[0] if elts else ''):
                grouped_entries = [e for e in grouper]
                sub_entries = [e for e in grouped_entries if len(e[0]) > 1]
                key_entries = [e for e in grouped_entries if len(e[0]) == 1]
                key_entry = key_entries[0] if key_entries else None
                key_page = key_entry[1] if key_entries else None

                if key and len(sub_entries) >= minsize:
                    # remove key from path_elements in grouped entries for
                    # further grouping
                    sub_entries = [(path_elements[1:], page)
                                   for path_elements, page in sub_entries]
                    subnodes = tree_hierarchy(sorted(sub_entries))
                    node = (key, key_page, subnodes)
                    groups.append(node)
                else:
                    if key_entry:
                        groups.append(key_entry)
                    groups.extend(sub_entries)
            return groups

        # the different rendering formats
        def render_group(group):
            return tag.ul(
                tag.li(tag(tag.strong(elt[0].strip('/')),
                           render_group(elt[1]))
                       if isinstance(elt, tuple) else
                       tag.a(wiki.format_page_name(omitprefix(elt)),
                             href=formatter.href.wiki(elt)))
                for elt in group)

        def render_hierarchy(group):
            return tag.ul(
                tag.li(tag(tag.a(elt[0], href=formatter.href.wiki(elt[1]))
                           if elt[1] else tag(elt[0]),
                           render_hierarchy(elt[2]))
                       if len(elt) == 3 else
                       tag.a('/'.join(elt[0]),
                             href=formatter.href.wiki(elt[1])))
                for elt in group)

        transform = {
            'group': lambda p: render_group(tree_group(split_pages_group(p))),
            'hierarchy': lambda p: render_hierarchy(
                                    tree_hierarchy(split_pages_hierarchy(p))),
            }.get(format, None)

        if transform:
            titleindex = transform(pages)
        else:
            # default: one flat bulleted list
            titleindex = tag.ul(
                tag.li(tag.a(wiki.format_page_name(omitprefix(page)),
                             href=formatter.href.wiki(page)))
                for page in pages)

        return tag.div(titleindex, class_='titleindex')
class RecentChangesMacro(WikiMacroBase):
    _domain = 'messages'
    _description = cleandoc_(
    """List all pages that have recently been modified, ordered by the
    time they were last modified.

    This macro accepts two ordered arguments and a named argument. The named
    argument can be placed in any position within the argument list.

    The first parameter is a prefix string: if provided, only pages with names
    that start with the prefix are included in the resulting list. If this
    parameter is omitted, all pages are included in the list.

    The second parameter is the maximum number of pages to include in the
    list.

    The `group` parameter determines how the list is presented:
      `group=date` :: The pages are presented in bulleted lists that are
        grouped by date (default).
      `group=none` :: The pages are presented in a single bulleted list.

    Tip: if you only want to specify a maximum number of entries and
    don't want to filter by prefix, specify an empty first parameter,
    e.g. `[[RecentChanges(,10,group=none)]]`.
    """)

    def expand_macro(self, formatter, name, content):
        """Render the list of recently modified pages.

        See the class description for the accepted arguments.
        """
        args, kw = parse_args(content)
        prefix = args[0].strip() if args else None
        limit = int(args[1].strip()) if len(args) > 1 else None
        group = kw.get('group', 'date')

        sql = """SELECT name, max(version) AS max_version,
                        max(time) AS max_time FROM wiki"""
        # Use a dedicated list for the SQL parameters instead of
        # re-using (shadowing) the parsed macro-argument list `args`.
        params = []
        if prefix:
            sql += " WHERE name LIKE %s"
            params.append(prefix + '%')
        sql += " GROUP BY name ORDER BY max_time DESC"
        if limit:
            sql += " LIMIT %s"
            params.append(limit)

        req = formatter.req  # loop-invariant; hoisted out of the loop
        entries_per_date = []
        prevdate = None
        # `pagename` (not `name`) avoids shadowing the macro name parameter
        for pagename, version, ts in self.env.db_query(sql, params):
            if 'WIKI_VIEW' not in formatter.perm('wiki', pagename, version):
                continue
            date = user_time(req, format_date, from_utimestamp(ts))
            if date != prevdate:
                prevdate = date
                entries_per_date.append((date, []))
            version = int(version)
            diff_href = None
            if version > 1:
                diff_href = formatter.href.wiki(pagename, action='diff',
                                                version=version)
            page_label = formatter.wiki.format_page_name(pagename)
            entries_per_date[-1][1].append((page_label, pagename, version,
                                            diff_href))

        # Lazily build one list item per page, grouped by modification date
        items_per_date = (
            (date, (tag.li(tag.a(page, href=formatter.href.wiki(pagename)),
                           tag.small(' (', tag.a('diff', href=diff_href), ')')
                           if diff_href else None, '\n')
                    for page, pagename, version, diff_href in entries))
            for date, entries in entries_per_date)

        if group == 'date':
            out = ((tag.h3(date), tag.ul(entries))
                   for date, entries in items_per_date)
        else:
            out = tag.ul((entries)
                         for date, entries in items_per_date)
        return tag.div(out)
class PageOutlineMacro(WikiMacroBase):
    _domain = 'messages'
    _description = cleandoc_(
    """Display a structural outline of the current wiki page, each item in the
    outline being a link to the corresponding heading.

    This macro accepts four optional parameters:

     * The first is a number or range that allows configuring the minimum and
       maximum level of headings that should be included in the outline. For
       example, specifying "1" here will result in only the top-level headings
       being included in the outline. Specifying "2-3" will make the outline
       include all headings of level 2 and 3, as a nested list. The default is
       to include all heading levels.
     * The second parameter can be used to specify a custom title (the default
       is no title).
     * The third parameter selects the style of the outline. This can be
       either `inline` or `pullout` (the latter being the default). The
       `inline` style renders the outline as normal part of the content, while
       `pullout` causes the outline to be rendered in a box that is by default
       floated to the right side of the other content.
     * The fourth parameter specifies whether the outline is numbered or not.
       It can be either `numbered` or `unnumbered` (the former being the
       default). This parameter only has an effect in `inline` style.
    """)

    def expand_macro(self, formatter, name, content):
        """Render the outline; see the class description for the options."""
        # Defaults: all heading levels, untitled, pullout style, numbered
        min_depth, max_depth = 1, 6
        title = None
        inline = False
        numbered = True
        if content:
            argv = [arg.strip() for arg in content.split(',')]
            if len(argv) > 0:
                depth = argv[0]
                if '-' in depth:
                    # a "min-max" range, e.g. "2-3"
                    min_depth, max_depth = [int(d)
                                            for d in depth.split('-', 1)]
                else:
                    min_depth = max_depth = int(depth)
                if len(argv) > 1:
                    title = argv[1].strip()
                for arg in argv[2:]:
                    arg = arg.strip().lower()
                    if arg == 'inline':
                        inline = True
                    elif arg == 'unnumbered':
                        numbered = False

        # TODO: - integrate the rest of the OutlineFormatter directly here
        #       - use formatter.wikidom instead of formatter.source
        out = StringIO()
        oformatter = OutlineFormatter(self.env, formatter.context)
        oformatter.format(formatter.source, out, max_depth, min_depth,
                          shorten=not inline)
        outline = Markup(out.getvalue())

        if title:
            outline = tag.h4(title) + outline
        if not inline:
            outline = tag.div(outline, class_='wiki-toc')
        elif not numbered:
            outline = tag.div(outline, class_='wiki-toc-un')
        return outline
class ImageMacro(WikiMacroBase):
    _domain = 'messages'
    _description = cleandoc_(
    """Embed an image in wiki-formatted text.

    The first argument is the file specification. The file specification may
    reference attachments in three ways:
     * `module:id:file`, where module can be either '''wiki''' or '''ticket''',
       to refer to the attachment named ''file'' of the specified wiki page or
       ticket.
     * `id:file`: same as above, but id is either a ticket shorthand or a Wiki
       page name.
     * `file` to refer to a local attachment named 'file'. This only works from
       within that wiki page or a ticket.

    Also, the file specification may refer to repository files, using the
    `source:file` syntax (`source:file@rev` works also).

    Files can also be accessed with a direct URLs; `/file` for a
    project-relative, `//file` for a server-relative, or `http://server/file`
    for absolute location of the file.

    The remaining arguments are optional and allow configuring the attributes
    and style of the rendered `<img>` element:
     * digits and unit are interpreted as the size (ex. 120, 25%)
       for the image
     * `right`, `left`, `center`, `top`, `bottom` and `middle` are interpreted
       as the alignment for the image (alternatively, the first three can be
       specified using `align=...` and the last three using `valign=...`)
     * `link=some TracLinks...` replaces the link to the image source by the
       one specified using a TracLinks. If no value is specified, the link is
       simply removed.
     * `nolink` means without link to image source (deprecated, use `link=`)
     * `key=value` style are interpreted as HTML attributes or CSS style
       indications for the image. Valid keys are:
        * align, valign, border, width, height, alt, title, longdesc, class,
          margin, margin-(left,right,top,bottom), id and usemap
        * `border`, `margin`, and `margin-`* can only be a single number
        * `margin` is superseded by `center` which uses auto margins

    Examples:
    {{{
        [[Image(photo.jpg)]]               # simplest
        [[Image(photo.jpg, 120px)]]        # with image width size
        [[Image(photo.jpg, right)]]        # aligned by keyword
        [[Image(photo.jpg, nolink)]]       # without link to source
        [[Image(photo.jpg, align=right)]]  # aligned by attribute
    }}}

    You can use image from other page, other ticket or other module.
    {{{
        [[Image(OtherPage:foo.bmp)]]    # if current module is wiki
        [[Image(base/sub:bar.bmp)]]     # from hierarchical wiki page
        [[Image(#3:baz.bmp)]]           # if in a ticket, point to #3
        [[Image(ticket:36:boo.jpg)]]
        [[Image(source:/images/bee.jpg)]] # straight from the repository!
        [[Image(htdocs:foo/bar.png)]]   # image file in project htdocs dir.
    }}}

    ''Adapted from the Image.py macro created by Shun-ichi Goto
    <gotoh@taiyo.co.jp>''
    """)

    def is_inline(self, content):
        # Images render inline within the surrounding wiki text
        return True

    def expand_macro(self, formatter, name, content):
        """Render `[[Image(filespec, option, ...)]]` into an `<img>` tag,
        optionally wrapped in a link to the image source."""
        # args will be null if the macro is called without parenthesis.
        if not content:
            return ''
        # parse arguments
        # we expect the 1st argument to be a filename (filespec)
        args = content.split(',')
        if len(args) == 0:
            raise Exception("No argument.")
        # strip unicode white-spaces and ZWSPs are copied from attachments
        # section (#10668)
        filespec = stripws(args.pop(0))

        # style information
        size_re = re.compile('[0-9]+(%|px)?$')
        attr_re = re.compile('(align|valign|border|width|height|alt'
                             '|margin(?:-(?:left|right|top|bottom))?'
                             '|title|longdesc|class|id|usemap)=(.+)')
        quoted_re = re.compile("(?:[\"'])(.*)(?:[\"'])$")
        attr = {}
        style = {}
        link = ''
        # helper for the special case `source:`
        #
        from trac.versioncontrol.web_ui import BrowserModule
        # FIXME: somehow use ResourceSystem.get_known_realms()
        #        ... or directly trac.wiki.extract_link
        try:
            browser_links = [res[0] for res in
                             BrowserModule(self.env).get_link_resolvers()]
        except Exception:
            browser_links = []
        # consume the remaining macro arguments (size, alignment,
        # link=..., key=value attributes)
        while args:
            arg = args.pop(0).strip()
            if size_re.match(arg):
                # 'width' keyword
                attr['width'] = arg
            elif arg == 'nolink':
                link = None
            elif arg.startswith('link='):
                val = arg.split('=', 1)[1]
                elt = extract_link(self.env, formatter.context, val.strip())
                elt = find_element(elt, 'href')
                link = None
                if elt is not None:
                    link = elt.attrib.get('href')
            elif arg in ('left', 'right'):
                style['float'] = arg
            elif arg == 'center':
                style['margin-left'] = style['margin-right'] = 'auto'
                style['display'] = 'block'
                style.pop('margin', '')
            elif arg in ('top', 'bottom', 'middle'):
                style['vertical-align'] = arg
            else:
                match = attr_re.match(arg)
                if match:
                    key, val = match.groups()
                    # align=/valign= keywords are re-queued and handled by
                    # the positional branches above
                    if (key == 'align' and
                            val in ('left', 'right', 'center')) or \
                        (key == 'valign' and \
                         val in ('top', 'middle', 'bottom')):
                        args.append(val)
                    elif key in ('margin-top', 'margin-bottom'):
                        style[key] = ' %dpx' % int(val)
                    elif key in ('margin', 'margin-left', 'margin-right') \
                             and 'display' not in style:
                        style[key] = ' %dpx' % int(val)
                    elif key == 'border':
                        style['border'] = ' %dpx solid' % int(val)
                    else:
                        m = quoted_re.search(val)  # unquote "..." and '...'
                        if m:
                            val = m.group(1)
                        attr[str(key)] = val  # will be used as a __call__ kwd

        # parse filespec argument to get realm and id if contained.
        parts = filespec.split(':')
        url = raw_url = desc = None
        attachment = None
        if (parts and parts[0] in ('http', 'https', 'ftp')):  # absolute
            raw_url = url = filespec
            desc = url.rsplit('?')[0]
        elif filespec.startswith('//'):      # server-relative
            raw_url = url = filespec[1:]
            desc = url.rsplit('?')[0]
        elif filespec.startswith('/'):       # project-relative
            params = ''
            if '?' in filespec:
                filespec, params = filespec.rsplit('?', 1)
            url = formatter.href(filespec)
            if params:
                url += '?' + params
            raw_url, desc = url, filespec
        elif len(parts) == 3:                # realm:id:attachment-filename
            #                                # or intertrac:realm:id
            realm, id, filename = parts
            intertrac_target = "%s:%s" % (id, filename)
            it = formatter.get_intertrac_url(realm, intertrac_target)
            if it:
                url, desc = it
                raw_url = url + unicode_quote('?format=raw')
            else:
                attachment = Resource(realm, id).child('attachment', filename)
        elif len(parts) == 2:
            realm, filename = parts
            if realm in browser_links:       # source:path
                # TODO: use context here as well
                rev = None
                if '@' in filename:
                    filename, rev = filename.rsplit('@', 1)
                url = formatter.href.browser(filename, rev=rev)
                raw_url = formatter.href.browser(filename, rev=rev,
                                                 format='raw')
                desc = filespec
            else:  # #ticket:attachment or WikiPage:attachment
                # FIXME: do something generic about shorthand forms...
                realm = None
                id, filename = parts
                if id and id[0] == '#':
                    realm = 'ticket'
                    id = id[1:]
                elif id == 'htdocs':
                    raw_url = url = formatter.href.chrome('site', filename)
                    desc = os.path.basename(filename)
                else:
                    realm = 'wiki'
                if realm:
                    attachment = Resource(realm, id).child('attachment',
                                                           filename)
        elif len(parts) == 1:  # it's an attachment of the current resource
            attachment = formatter.resource.child('attachment', filespec)
        else:
            raise TracError('No filespec given')

        if attachment and 'ATTACHMENT_VIEW' in formatter.perm(attachment):
            url = get_resource_url(self.env, attachment, formatter.href)
            raw_url = get_resource_url(self.env, attachment, formatter.href,
                                       format='raw')
            try:
                desc = get_resource_summary(self.env, attachment)
            except ResourceNotFound, e:
                # missing attachment: show a placeholder image instead
                raw_url = formatter.href.chrome('common/attachment.png')
                desc = _('No image "%(id)s" attached to %(parent)s',
                         id=attachment.id,
                         parent=get_resource_name(self.env,
                                                  attachment.parent))
        for key in ('title', 'alt'):
            if desc and not key in attr:
                attr[key] = desc
        if style:
            attr['style'] = '; '.join('%s:%s' % (k, escape(v))
                                      for k, v in style.iteritems())
        result = tag.img(src=raw_url, **attr)
        if link is not None:
            result = tag.a(result, href=link or url,
                           style='padding:0; border:none')
        return result
class MacroListMacro(WikiMacroBase):
    _domain = 'messages'
    _description = cleandoc_(
    """Display a list of all installed Wiki macros, including documentation if
    available.

    Optionally, the name of a specific macro can be provided as an argument. In
    that case, only the documentation for that macro will be rendered.

    Note that this macro will not be able to display the documentation of
    macros if the `PythonOptimize` option is enabled for mod_python!
    """)

    def expand_macro(self, formatter, name, content):
        """Render the list of installed macros with their documentation."""
        from trac.wiki.formatter import system_message

        content = content.strip() if content else ''
        name_filter = content.strip('*')

        def get_macro_descr():
            # Yield (rendered description, [macro names]) per provider
            for macro_provider in formatter.wiki.macro_providers:
                names = list(macro_provider.get_macros() or [])
                if name_filter and not any(name.startswith(name_filter)
                                           for name in names):
                    continue
                try:
                    name_descriptions = [
                        (name, macro_provider.get_macro_description(name))
                        for name in names]
                except Exception, e:
                    yield system_message(
                        _("Error: Can't get description for macro %(name)s",
                          name=names[0]), e), names
                else:
                    # Macros sharing a description are grouped as aliases
                    for descr, pairs in groupby(name_descriptions,
                                                key=lambda p: p[1]):
                        if descr:
                            if isinstance(descr, (tuple, list)):
                                # (gettext domain, description) pair
                                descr = dgettext(descr[0],
                                                 to_unicode(descr[1])) \
                                        if descr[1] else ''
                            else:
                                descr = to_unicode(descr) or ''
                            if content == '*':
                                descr = format_to_oneliner(
                                    self.env, formatter.context, descr,
                                    shorten=True)
                            else:
                                descr = format_to_html(
                                    self.env, formatter.context, descr)
                        yield descr, [name for name, descr in pairs]

        return tag.div(class_='trac-macrolist')(
            (tag.h3(tag.code('[[', names[0], ']]'), id='%s-macro' % names[0]),
             len(names) > 1 and tag.p(tag.strong(_("Aliases:")),
                                      [tag.code(' [[', alias, ']]')
                                       for alias in names[1:]]) or None,
             description or tag.em(_("Sorry, no documentation found")))
            for description, names in sorted(get_macro_descr(),
                                             key=lambda item: item[1][0]))
class TracIniMacro(WikiMacroBase):
_domain = 'messages'
_description = cleandoc_(
"""Produce documentation for the Trac configuration file.
Typically, this will be used in the TracIni page.
Optional arguments are a configuration section filter,
and a configuration option name filter: only the configuration
options whose section and name start with the filters are output.
""")
def expand_macro(self, formatter, name, args):
from trac.config import ConfigSection, Option
section_filter = key_filter = ''
args, kw = parse_args(args)
if args:
section_filter = args.pop(0).strip()
if args:
key_filter = args.pop(0).strip()
def getdoc(option_or_section):
doc = to_unicode(option_or_section.__doc__)
if doc:
doc = dgettext(option_or_section.doc_domain, doc)
return doc
registry = ConfigSection.get_registry(self.compmgr)
sections = dict((name, getdoc(section))
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | true |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/wiki/admin.py | trac/trac/wiki/admin.py | # -*- coding: utf-8 -*-
#
# Copyright (C) 2008 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://trac.edgewall.com/license.html.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://trac.edgewall.org/.
from __future__ import with_statement
from datetime import datetime
import os.path
import pkg_resources
import sys
from trac.admin import *
from trac.core import *
from trac.wiki import model
from trac.wiki.api import WikiSystem, validate_page_name
from trac.util import read_file
from trac.util.datefmt import format_datetime, from_utimestamp, \
to_utimestamp, utc
from trac.util.text import path_to_unicode, print_table, printout, \
to_unicode, unicode_quote, unicode_unquote
from trac.util.translation import _
class WikiAdmin(Component):
"""trac-admin command provider for wiki administration."""
implements(IAdminCommandProvider)
# IAdminCommandProvider methods
    def get_admin_commands(self):
        """Yield the `wiki *` trac-admin command descriptors.

        Each yielded tuple is `(command, args-help, description,
        completer, handler)` as expected by `IAdminCommandProvider`.
        """
        yield ('wiki list', '',
               'List wiki pages',
               None, self._do_list)
        yield ('wiki rename', '<page> <new_name>',
               'Rename wiki page',
               self._complete_page, self._do_rename)
        yield ('wiki remove', '<page>',
               'Remove wiki page',
               self._complete_page, self._do_remove)
        yield ('wiki export', '<page> [file]',
               'Export wiki page to file or stdout',
               self._complete_import_export, self._do_export)
        yield ('wiki import', '<page> [file]',
               'Import wiki page from file or stdin',
               self._complete_import_export, self._do_import)
        yield ('wiki dump', '<directory> [page] [...]',
               """Export wiki pages to files named by title

               Individual wiki page names can be specified after the
               directory. A name ending with a * means that all wiki pages
               starting with that prefix should be dumped. If no name is
               specified, all wiki pages are dumped.""",
               self._complete_dump, self._do_dump)
        yield ('wiki load', '<path> [...]',
               """Import wiki pages from files

               If a given path is a file, it is imported as a page with the
               name of the file. If a path is a directory, all files in that
               directory are imported.""",
               self._complete_load_replace, self._do_load)
        yield ('wiki replace', '<path> [...]',
               """Replace the content of wiki pages from files (DANGEROUS!)

               This command replaces the content of the last version of one
               or more wiki pages with new content. The previous content is
               lost, and no new entry is created in the page history. The
               metadata of the page (time, author) is not changed either.

               If a given path is a file, it is imported as a page with the
               name of the file. If a path is a directory, all files in that
               directory are imported.

               WARNING: This operation results in the loss of the previous
               content and cannot be undone. It may be advisable to backup
               the current content using "wiki dump" beforehand.""",
               self._complete_load_replace, self._do_replace)
        yield ('wiki upgrade', '',
               'Upgrade default wiki pages to current version',
               None, self._do_upgrade)
def get_wiki_list(self):
return list(WikiSystem(self.env).get_pages())
    def export_page(self, page, filename, cursor=None):
        """Write the latest version of `page` to `filename`, or print it
        to stdout if `filename` is empty.

        :raise AdminCommandError: if the page does not exist or the target
                                  file already exists
        :since 1.0: the `cursor` parameter is no longer needed and will be
        removed in version 1.1.1
        """
        for text, in self.env.db_query("""
                SELECT text FROM wiki WHERE name=%s
                ORDER BY version DESC LIMIT 1
                """, (page,)):
            if not filename:
                printout(text)
            else:
                if os.path.isfile(filename):
                    raise AdminCommandError(_("File '%(name)s' exists",
                                              name=path_to_unicode(filename)))
                # NOTE(review): UTF-8 bytes written in text mode; on Windows
                # this translates newlines -- presumably intended, confirm
                with open(filename, 'w') as f:
                    f.write(text.encode('utf-8'))
            break
        else:
            # for/else: the query returned no row at all
            raise AdminCommandError(_("Page '%(page)s' not found", page=page))
def import_page(self, filename, title, create_only=[],
replace=False):
if not validate_page_name(title):
raise AdminCommandError(_("Invalid Wiki page name '%(name)s'",
name=title))
if filename:
if not os.path.isfile(filename):
raise AdminCommandError(_("'%(name)s' is not a file",
name=path_to_unicode(filename)))
data = read_file(filename)
else:
data = sys.stdin.read()
data = to_unicode(data, 'utf-8')
with self.env.db_transaction as db:
# Make sure we don't insert the exact same page twice
old = db("""SELECT text FROM wiki WHERE name=%s
ORDER BY version DESC LIMIT 1
""", (title,))
if old and title in create_only:
printout(_(" %(title)s already exists", title=title))
return False
if old and data == old[0][0]:
printout(_(" %(title)s is already up to date", title=title))
return False
if replace and old:
db("""UPDATE wiki SET text=%s
WHERE name=%s
AND version=(SELECT max(version) FROM wiki
WHERE name=%s)
""", (data, title, title))
else:
db("""INSERT INTO wiki(version, name, time, author, ipnr, text)
SELECT 1 + COALESCE(max(version), 0), %s, %s, 'trac',
'127.0.0.1', %s FROM wiki WHERE name=%s
""", (title, to_utimestamp(datetime.now(utc)), data,
title))
if not old:
del WikiSystem(self.env).pages
return True
    def load_pages(self, dir, ignore=[], create_only=[], replace=False):
        """Import every regular file of `dir` as a wiki page, all within a
        single transaction.

        :param ignore: file names to skip entirely
        :param create_only: page names that must not be overwritten
        :param replace: passed through to `import_page`
        """
        with self.env.db_transaction:
            for page in os.listdir(dir):
                if page in ignore:
                    continue
                filename = os.path.join(dir, page)
                # File names are %-quoted UTF-8; decode back to a page name.
                page = unicode_unquote(page.encode('utf-8'))
                if os.path.isfile(filename):
                    if self.import_page(filename, page, create_only, replace):
                        printout(_("  %(page)s imported from %(filename)s",
                                   filename=path_to_unicode(filename),
                                   page=page))
def _complete_page(self, args):
if len(args) == 1:
return self.get_wiki_list()
def _complete_import_export(self, args):
if len(args) == 1:
return self.get_wiki_list()
elif len(args) == 2:
return get_dir_list(args[-1])
def _complete_dump(self, args):
if len(args) == 1:
return get_dir_list(args[-1], dirs_only=True)
elif len(args) >= 2:
return self.get_wiki_list()
def _complete_load_replace(self, args):
if len(args) >= 1:
return get_dir_list(args[-1])
    def _do_list(self):
        """Print a console table of all pages with their edit count
        (highest version) and last modification time."""
        print_table(
            [(title, int(edits), format_datetime(from_utimestamp(modified),
                                                 console_datetime_format))
             for title, edits, modified in self.env.db_query("""
                    SELECT name, max(version), max(time)
                    FROM wiki GROUP BY name ORDER BY name""")
             ], [_("Title"), _("Edits"), _("Modified")])
    def _do_rename(self, name, new_name):
        """Rename the wiki page `name` to `new_name`, keeping its history.

        Raises `AdminCommandError` for an empty or invalid new name, or
        when the target page already exists.
        """
        # Renaming a page to itself is a silent no-op.
        if new_name == name:
            return
        if not new_name:
            raise AdminCommandError(_("A new name is mandatory for a rename."))
        if not validate_page_name(new_name):
            raise AdminCommandError(_("The new name is invalid."))
        with self.env.db_transaction:
            # Existence check and rename in one transaction.
            if model.WikiPage(self.env, new_name).exists:
                raise AdminCommandError(_("The page %(name)s already exists.",
                                          name=new_name))
            page = model.WikiPage(self.env, name)
            page.rename(new_name)
    def _do_remove(self, name):
        """Delete the wiki page `name`, or every page matching a trailing
        '*' wildcard, all in one transaction."""
        with self.env.db_transaction:
            if name.endswith('*'):
                # Wildcard: delete all pages sharing the given prefix
                # (a bare "*" means every page).
                pages = list(WikiSystem(self.env).get_pages(name.rstrip('*')
                                                            or None))
                for p in pages:
                    page = model.WikiPage(self.env, p)
                    page.delete()
                print_table(((p,) for p in pages), [_('Deleted pages')])
            else:
                page = model.WikiPage(self.env, name)
                page.delete()
    def _do_export(self, page, filename=None):
        """Console command: export `page` to `filename` (stdout if omitted)."""
        self.export_page(page, filename)
    def _do_import(self, page, filename=None):
        """Console command: import `page` from `filename` (stdin if omitted)."""
        self.import_page(filename, page)
    def _do_dump(self, directory, *names):
        """Export pages to `directory`, one file per page.

        `names` may contain exact page names or prefixes ending in '*';
        with no names given, every page is dumped.
        """
        if not names:
            names = ['*']
        pages = self.get_wiki_list()
        if not os.path.isdir(directory):
            if not os.path.exists(directory):
                os.mkdir(directory)
            else:
                raise AdminCommandError(_("'%(name)s' is not a directory",
                                          name=path_to_unicode(directory)))
        for p in pages:
            # Dump the page if it matches any exact name or '*' prefix.
            if any(p == name or (name.endswith('*')
                                 and p.startswith(name[:-1]))
                   for name in names):
                # %-quote the page name to obtain a safe file name.
                dst = os.path.join(directory, unicode_quote(p, ''))
                printout(' %s => %s' % (p, dst))
                self.export_page(p, dst)
    def _load_or_replace(self, paths, replace):
        """Shared implementation of the `wiki load` and `wiki replace`
        commands: import each path (single file or whole directory) in one
        transaction."""
        with self.env.db_transaction:
            for path in paths:
                if os.path.isdir(path):
                    self.load_pages(path, replace=replace)
                else:
                    # The page name is the %-quoted UTF-8 base file name.
                    page = os.path.basename(path)
                    page = unicode_unquote(page.encode('utf-8'))
                    if self.import_page(path, page, replace=replace):
                        printout(_("  %(page)s imported from %(filename)s",
                                   filename=path_to_unicode(path), page=page))
    def _do_load(self, *paths):
        """Console command: import pages from `paths`, adding new versions."""
        self._load_or_replace(paths, replace=False)
    def _do_replace(self, *paths):
        """Console command: import pages from `paths`, replacing the current
        version in place."""
        self._load_or_replace(paths, replace=True)
    def _do_upgrade(self):
        """Re-import the default wiki pages shipped with Trac.

        WikiStart is never touched and InterMapTxt is only created when
        missing, so local customizations survive the upgrade.
        """
        self.load_pages(pkg_resources.resource_filename('trac.wiki',
                                                        'default-pages'),
                        ignore=['WikiStart', 'checkwiki.py'],
                        create_only=['InterMapTxt'])
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/wiki/interwiki.py | trac/trac/wiki/interwiki.py | # -*- coding: utf-8 -*-
#
# Copyright (C) 2005-2009 Edgewall Software
# Copyright (C) 2005-2006 Christian Boos <cboos@edgewall.org>
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://trac.edgewall.org/wiki/TracLicense.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://trac.edgewall.org/log/.
#
# Author: Christian Boos <cboos@edgewall.org>
import re
from genshi.builder import tag
from trac.cache import cached
from trac.config import ConfigSection
from trac.core import *
from trac.util.translation import _, N_
from trac.wiki.api import IWikiChangeListener, IWikiMacroProvider
from trac.wiki.parser import WikiParser
from trac.wiki.formatter import split_url_into_path_query_fragment
class InterWikiMap(Component):
    """InterWiki map manager.

    The map is assembled from the InterMapTxt wiki page and the
    `[interwiki]` configuration section, the latter taking precedence.
    """
    implements(IWikiChangeListener, IWikiMacroProvider)
    interwiki_section = ConfigSection('interwiki',
        """Every option in the `[interwiki]` section defines one InterWiki
        prefix. The option name defines the prefix. The option value defines
        the URL, optionally followed by a description separated from the URL
        by whitespace. Parametric URLs are supported as well.
        '''Example:'''
        {{{
        [interwiki]
        MeatBall = http://www.usemod.com/cgi-bin/mb.pl?
        PEP = http://www.python.org/peps/pep-$1.html Python Enhancement Proposal $1
        tsvn = tsvn: Interact with TortoiseSvn
        }}}
        """)
    # Wiki page whose content also contributes interwiki prefixes.
    _page_name = 'InterMapTxt'
    # Matches "prefix url [# description]" lines of the InterMapTxt page.
    _interwiki_re = re.compile(r"(%s)[ \t]+([^ \t]+)(?:[ \t]+#(.*))?" %
                               WikiParser.LINK_SCHEME, re.UNICODE)
    # Positional argument placeholders ($1, $2, ...) in parametric URLs.
    _argspec_re = re.compile(r"\$\d")
    # The component itself behaves as a read-only map
    def __contains__(self, ns):
        # Namespace lookup is case-insensitive (keys are upper-cased).
        return ns.upper() in self.interwiki_map
    def __getitem__(self, ns):
        return self.interwiki_map[ns.upper()]
    def keys(self):
        return self.interwiki_map.keys()
    # Expansion of positional arguments ($1, $2, ...) in URL and title
    def _expand(self, txt, args):
        """Replace "$1" by the first args, "$2" by the second, etc."""
        def setarg(match):
            num = int(match.group()[1:])
            # Out-of-range placeholders expand to the empty string.
            return args[num - 1] if 0 < num <= len(args) else ''
        return re.sub(InterWikiMap._argspec_re, setarg, txt)
    def _expand_or_append(self, txt, args):
        """Like expand, but also append first arg if there's no "$"."""
        if not args:
            return txt
        expanded = self._expand(txt, args)
        return txt + args[0] if expanded == txt else expanded
    def url(self, ns, target):
        """Return `(url, title)` for the given InterWiki `ns`.
        Expand the colon-separated `target` arguments.
        """
        ns, url, title = self[ns]
        # Highest $N placeholder decides how many args to split off.
        maxargnum = max([0] + [int(a[1:]) for a in
                               re.findall(InterWikiMap._argspec_re, url)])
        target, query, fragment = split_url_into_path_query_fragment(target)
        if maxargnum > 0:
            args = target.split(':', (maxargnum - 1))
        else:
            args = [target]
        url = self._expand_or_append(url, args)
        ntarget, nquery, nfragment = split_url_into_path_query_fragment(url)
        # Merge the query strings of the prefix URL and the target.
        if query and nquery:
            nquery = '%s&%s' % (nquery, query[1:])
        else:
            nquery = nquery or query
        nfragment = fragment or nfragment  # user provided takes precedence
        expanded_url = ntarget + nquery + nfragment
        expanded_title = self._expand(title, args)
        if expanded_title == title:
            # Title had no placeholders: fall back to "<target> in <name>".
            expanded_title = _("%(target)s in %(name)s",
                               target=target, name=title)
        return expanded_url, expanded_title
    # IWikiChangeListener methods
    # Any change to InterMapTxt invalidates the cached interwiki map.
    def wiki_page_added(self, page):
        if page.name == InterWikiMap._page_name:
            del self.interwiki_map
    def wiki_page_changed(self, page, version, t, comment, author, ipnr):
        if page.name == InterWikiMap._page_name:
            del self.interwiki_map
    def wiki_page_deleted(self, page):
        if page.name == InterWikiMap._page_name:
            del self.interwiki_map
    def wiki_page_version_deleted(self, page):
        if page.name == InterWikiMap._page_name:
            del self.interwiki_map
    @cached
    def interwiki_map(self, db):
        """Map from upper-cased namespaces to (namespace, prefix, title)
        values.
        """
        from trac.wiki.model import WikiPage
        map = {}
        content = WikiPage(self.env, InterWikiMap._page_name, db=db).text
        in_map = False
        # Only the part of the page between two '----' rules is parsed.
        for line in content.split('\n'):
            if in_map:
                if line.startswith('----'):
                    in_map = False
                else:
                    m = re.match(InterWikiMap._interwiki_re, line)
                    if m:
                        prefix, url, title = m.groups()
                        url = url.strip()
                        title = title.strip() if title else prefix
                        map[prefix.upper()] = (prefix, url, title)
            elif line.startswith('----'):
                in_map = True
        # [interwiki] config options override page-defined prefixes.
        for prefix, value in self.interwiki_section.options():
            value = value.split(None, 1)
            if value:
                url = value[0].strip()
                title = value[1].strip() if len(value) > 1 else prefix
                map[prefix.upper()] = (prefix, url, title)
        return map
    # IWikiMacroProvider methods
    def get_macros(self):
        yield 'InterWiki'
    def get_macro_description(self, name):
        return 'messages', \
               N_("Provide a description list for the known InterWiki "
                  "prefixes.")
    def expand_macro(self, formatter, name, content):
        # Render a table of all known prefixes, sorted alphabetically.
        interwikis = []
        for k in sorted(self.keys()):
            prefix, url, title = self[k]
            interwikis.append({
                'prefix': prefix, 'url': url, 'title': title,
                'rc_url': self._expand_or_append(url, ['RecentChanges']),
                'description': url if title == prefix else title})
        return tag.table(tag.tr(tag.th(tag.em("Prefix")),
                                tag.th(tag.em("Site"))),
                         [tag.tr(tag.td(tag.a(w['prefix'], href=w['rc_url'])),
                                 tag.td(tag.a(w['description'],
                                              href=w['url'])))
                          for w in interwikis ],
                         class_="wiki interwiki")
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/wiki/parser.py | trac/trac/wiki/parser.py | # -*- coding: utf-8 -*-
#
# Copyright (C) 2005-2009 Edgewall Software
# Copyright (C) 2003-2006 Jonas Borgström <jonas@edgewall.com>
# Copyright (C) 2004-2006 Christopher Lenz <cmlenz@gmx.de>
# Copyright (C) 2005-2007 Christian Boos <cboos@edgewall.org>
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://trac.edgewall.org/wiki/TracLicense.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://trac.edgewall.org/log/.
#
# Author: Jonas Borgström <jonas@edgewall.com>
# Christopher Lenz <cmlenz@gmx.de>
# Christian Boos <cboos@edgewall.org>
import re
from trac.core import *
from trac.notification import EMAIL_LOOKALIKE_PATTERN
class WikiParser(Component):
    """Wiki text parser.

    Holds the regexp building blocks of the wiki syntax and lazily compiles
    them, together with rules contributed by `IWikiSyntaxProvider`
    components, into one big alternation used by the formatter.
    """
    # Some constants used for clarifying the Wiki regexps:
    BOLDITALIC_TOKEN = "'''''"
    BOLD_TOKEN = "'''"
    BOLD_TOKEN_WIKICREOLE = r"\*\*"
    ITALIC_TOKEN = "''"
    ITALIC_TOKEN_WIKICREOLE = "//"
    UNDERLINE_TOKEN = "__"
    STRIKE_TOKEN = "~~"
    SUBSCRIPT_TOKEN = ",,"
    SUPERSCRIPT_TOKEN = r"\^"
    INLINE_TOKEN = "`"  # must be a single char (see P<definition> below)
    STARTBLOCK_TOKEN = r"\{\{\{"
    STARTBLOCK = "{{{"
    ENDBLOCK_TOKEN = r"\}\}\}"
    ENDBLOCK = "}}}"
    BULLET_CHARS = u"-*\u2022"
    LINK_SCHEME = r"[a-zA-Z][-a-zA-Z0-9+._]*"  # as per RFC 2396 + '_'
    INTERTRAC_SCHEME = r"[a-zA-Z.+-]*?"  # no digits (for shorthand links)
    QUOTED_STRING = r"'[^']+'|\"[^\"]+\""
    SHREF_TARGET_FIRST = r"[\w/?!#@](?<!_)"  # we don't want "_"
    SHREF_TARGET_MIDDLE = r"(?:\|(?=[^|\s])|[^|<>\s])"
    SHREF_TARGET_LAST = r"[\w/=](?<!_)"  # we don't want "_"
    def _lhref_relative_target(sep):
        # Relative link targets: "/...", "?...", "#...", "." or ".."
        return r"[/\?#][^%s\]]*|\.\.?(?:[/\?#][^%s\]]*)?" % (sep, sep)
    LHREF_RELATIVE_TARGET = _lhref_relative_target(r'\s')
    XML_NAME = r"[\w:](?<!\d)[\w:.-]*?"  # See http://www.w3.org/TR/REC-xml/#id
    PROCESSOR = r"(\s*)#\!([\w+-][\w+-/]*)"
    PROCESSOR_PARAM = r'''(?P<proc_pname>\w+)=(?P<proc_pval>".*?"|'.*?'|\w+)'''
    def _set_anchor(name, sep):
        # Builds the "[=#anchor label]" rule for a given name/sep grammar.
        return r'=#(?P<anchorname>%s)(?:%s(?P<anchorlabel>[^\]]*))?' % \
               (name, sep)
    # Sequence of regexps used by the engine
    _pre_rules = [
        # Font styles
        r"(?P<bolditalic>!?%s)" % BOLDITALIC_TOKEN,
        r"(?P<bold>!?%s)" % BOLD_TOKEN,
        r"(?P<bold_wc>!?%s)" % BOLD_TOKEN_WIKICREOLE,
        r"(?P<italic>!?%s)" % ITALIC_TOKEN,
        r"(?P<italic_wc>!?%s)" % ITALIC_TOKEN_WIKICREOLE,
        r"(?P<underline>!?%s)" % UNDERLINE_TOKEN,
        r"(?P<strike>!?%s)" % STRIKE_TOKEN,
        r"(?P<subscript>!?%s)" % SUBSCRIPT_TOKEN,
        r"(?P<superscript>!?%s)" % SUPERSCRIPT_TOKEN,
        r"(?P<inlinecode>!?%s(?P<inline>.*?)%s)" \
        % (STARTBLOCK_TOKEN, ENDBLOCK_TOKEN),
        r"(?P<inlinecode2>!?%s(?P<inline2>.*?)%s)" \
        % (INLINE_TOKEN, INLINE_TOKEN),
        ]
    # Rules provided by IWikiSyntaxProviders will be inserted here
    _post_rules = [
        # WikiCreole line breaks
        r"(?P<linebreak_wc>!?\\\\)",
        # e-mails
        r"(?P<email>!?%s)" % EMAIL_LOOKALIKE_PATTERN,
        # <wiki:Trac bracket links>
        r"(?P<shrefbr>!?<(?P<snsbr>%s):(?P<stgtbr>[^>]+)>)" % LINK_SCHEME,
        # &, < and > to &amp;, &lt; and &gt;
        r"(?P<htmlescape>[&<>])",
        # wiki:TracLinks or intertrac:wiki:TracLinks
        r"(?P<shref>!?((?P<sns>%s):(?P<stgt>%s:(?:%s)|%s|%s(?:%s*%s)?)))" \
        % (LINK_SCHEME, LINK_SCHEME, QUOTED_STRING, QUOTED_STRING,
           SHREF_TARGET_FIRST, SHREF_TARGET_MIDDLE, SHREF_TARGET_LAST),
        # [wiki:TracLinks with optional label] or [/relative label]
        (r"(?P<lhref>!?\[(?:"
         r"(?P<rel>%s)|" % LHREF_RELATIVE_TARGET +  # ./... or /...
         r"(?P<lns>%s):(?P<ltgt>%s:(?:%s)|%s|[^\]\s\%s]*))" % \
         (LINK_SCHEME, LINK_SCHEME, QUOTED_STRING, QUOTED_STRING, u'\u200b') +
         # wiki:TracLinks or wiki:"trac links" or intertrac:wiki:"trac links"
         r"(?:[\s%s]+(?P<label>%s|[^\]]*))?\])" % \
         (u'\u200b', QUOTED_STRING)),  # trailing space, optional label
        # [=#anchor] creation
        r"(?P<anchor>!?\[%s\])" % _set_anchor(XML_NAME, r'\s+'),
        # [[macro]] call or [[WikiCreole link]]
        (r"(?P<macrolink>!?\[\[(?:[^]]|][^]])+\]\])"),
        # == heading == #hanchor
        r"(?P<heading>^\s*(?P<hdepth>={1,6})\s(?P<htext>.*?)"
        r"(?P<hanchor>#%s)?\s*$)" % XML_NAME,
        # * list
        r"(?P<list>^(?P<ldepth>\s*)"
        ur"(?:[%s]|(?P<lstart>[0-9]+|[a-zA-Z]|[ivxIVX]{1,5})\.)\s)"
        % (BULLET_CHARS),
        # definition::
        r"(?P<definition>^\s+"
        r"((?:%s[^%s]*%s|%s(?:%s{,2}[^%s])*?%s|[^%s%s:]|:[^:])+::)(?:\s+|$))"
        % (INLINE_TOKEN, INLINE_TOKEN, INLINE_TOKEN,
           STARTBLOCK_TOKEN, ENDBLOCK[0], ENDBLOCK[0], ENDBLOCK_TOKEN,
           INLINE_TOKEN, STARTBLOCK[0]),
        # |- row separator
        r"(?P<table_row_sep>!?\s*\|-+\s*"
        r"(?P<table_row_params>%s\s*)*)" % PROCESSOR_PARAM,
        # (leading space)
        r"(?P<indent>^(?P<idepth>\s+)(?=\S))",
        # || table ||
        r"(?P<table_cell>!?(?P<table_cell_sep>=?(?:\|\|)+=?)"
        r"(?P<table_cell_last>\s*\\?$)?)",
        ]
    _processor_re = re.compile(PROCESSOR)
    _startblock_re = re.compile(r"\s*%s(?:%s|\s*$)" %
                                (STARTBLOCK, PROCESSOR))
    _processor_param_re = re.compile(PROCESSOR_PARAM)
    _anchor_re = re.compile(r'[^\w:.-]+', re.UNICODE)
    _macro_re = re.compile(r'''
          (?P<macroname> [\w/+-]+ \?? | \? )   # macro, macro? or ?
          (?: \( (?P<macroargs> .*? ) \) )? $  # optional arguments within ()
          ''', re.VERBOSE)
    _creolelink_re = re.compile(r'''
        (?:
          (?P<rel> %(rel)s )                 # rel is "./..." or "/..."
          | (?: (?P<lns> %(scheme)s ) : )?   # lns is the optional "scheme:"
            (?P<ltgt>                        # ltgt is the optional target
              %(scheme)s : (?:%(quoted)s)    # - "scheme:'...quoted..'"
              | %(quoted)s                   # - "'...quoted...'"
              | [^|]+                        # - anything but a '|'
            )?
        )
        \s* (?: \| (?P<label> .* ) )?        # optional label after a '|'
        $
        ''' % {'rel': _lhref_relative_target(r'|'),
               'scheme': LINK_SCHEME,
               'quoted': QUOTED_STRING}, re.VERBOSE)
    _set_anchor_wc_re = re.compile(_set_anchor(XML_NAME, r'\|\s*') + r'$')
    def __init__(self):
        # All four are built lazily by _prepare_rules on first access.
        self._compiled_rules = None
        self._link_resolvers = None
        self._helper_patterns = None
        self._external_handlers = None
    @property
    def rules(self):
        self._prepare_rules()
        return self._compiled_rules
    @property
    def helper_patterns(self):
        self._prepare_rules()
        return self._helper_patterns
    @property
    def external_handlers(self):
        self._prepare_rules()
        return self._external_handlers
    def _prepare_rules(self):
        """Compile pre-rules, provider-contributed rules and post-rules
        into one alternation, collecting the helper group names and the
        handlers for provider rules along the way."""
        from trac.wiki.api import WikiSystem
        if not self._compiled_rules:
            helpers = []
            handlers = {}
            syntax = self._pre_rules[:]
            i = 0
            for resolver in WikiSystem(self.env).syntax_providers:
                for regexp, handler in resolver.get_wiki_syntax() or []:
                    # Provider rules get synthetic group names i0, i1, ...
                    handlers['i' + str(i)] = handler
                    syntax.append('(?P<i%d>%s)' % (i, regexp))
                    i += 1
            syntax += self._post_rules[:]
            helper_re = re.compile(r'\?P<([a-z\d_]+)>')
            for rule in syntax:
                # All named groups but the first of each rule are helpers.
                helpers += helper_re.findall(rule)[1:]
            rules = re.compile('(?:' + '|'.join(syntax) + ')', re.UNICODE)
            self._external_handlers = handlers
            self._helper_patterns = helpers
            self._compiled_rules = rules
    @property
    def link_resolvers(self):
        if not self._link_resolvers:
            from trac.wiki.api import WikiSystem
            resolvers = {}
            for resolver in WikiSystem(self.env).syntax_providers:
                for namespace, handler in resolver.get_link_resolvers() or []:
                    resolvers[namespace] = handler
            self._link_resolvers = resolvers
        return self._link_resolvers
    def parse(self, wikitext):
        """Parse `wikitext` and produce a WikiDOM tree."""
        # obviously still some work to do here ;)
        return wikitext
def parse_processor_args(processor_args):
    """Parse a string containing parameter assignments,
    and return the corresponding dictionary.

    Isolated keywords are interpreted as `bool` flags, `False` if the keyword
    is prefixed with "-", `True` otherwise.

    >>> parse_processor_args('ab="c de -f gh=ij" -')
    {'ab': 'c de -f gh=ij'}

    >>> sorted(parse_processor_args('ab=c de -f gh="ij klmn"').items())
    [('ab', 'c'), ('de', True), ('f', False), ('gh', 'ij klmn')]
    """
    chunks = WikiParser._processor_param_re.split(processor_args)
    keys = []
    values = []
    # Every match contributes a (name, value) pair at positions 1 and 2
    # of each group of three; strip one level of matching quotes.
    for name, value in zip(chunks[1::3], chunks[2::3]):
        keys.append(str(name))
        if value[:1] + value[-1:] in ('""', "''"):
            value = value[1:-1]
        values.append(value)
    # The text between matches may carry bare flag keywords.
    for leftover in chunks[::3]:
        for word in leftover.strip().split():
            if not re.match(r'-?\w+$', word):
                continue
            if word.startswith('-'):
                # "-flag" disables; a lone "-" is ignored.
                if len(word) > 1:
                    keys.append(str(word[1:]))
                    values.append(False)
            else:
                keys.append(str(word))
                values.append(True)
    # Flags were collected after the assignments, so they win on duplicates.
    return dict(zip(keys, values))
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/wiki/model.py | trac/trac/wiki/model.py | # -*- coding: utf-8 -*-
#
# Copyright (C) 2003-2009 Edgewall Software
# Copyright (C) 2003-2005 Jonas Borgström <jonas@edgewall.com>
# Copyright (C) 2005 Christopher Lenz <cmlenz@gmx.de>
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://trac.edgewall.org/wiki/TracLicense.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://trac.edgewall.org/log/.
#
# Author: Jonas Borgström <jonas@edgewall.com>
# Christopher Lenz <cmlenz@gmx.de>
from __future__ import with_statement
from datetime import datetime
from trac.core import *
from trac.resource import Resource, ResourceSystem
from trac.util.datefmt import from_utimestamp, to_utimestamp, utc
from trac.util.translation import _
from trac.wiki.api import WikiSystem, validate_page_name
class WikiPage(object):
    """Represents a wiki page (new or existing)."""
    realm = 'wiki'
    def __init__(self, env, name=None, version=None, db=None):
        self.env = env
        if isinstance(name, Resource):
            # A Resource may be passed instead of a plain page name.
            self.resource = name
            name = self.resource.id
        else:
            if version:
                version = int(version)  # must be a number or None
            self.resource = Resource('wiki', name, version)
        self.name = name
        if name:
            self._fetch(name, version, db)
        else:
            # Brand new, unnamed page: empty defaults.
            self.version = 0
            self.text = self.comment = self.author = ''
            self.time = None
            self.readonly = 0
        # Remember the loaded state so save() can detect modifications.
        self.old_text = self.text
        self.old_readonly = self.readonly
    def _fetch(self, name, version=None, db=None):
        """Load the given (or latest) version of the page from the
        database, resetting to empty defaults when it does not exist."""
        if version is not None:
            sql = """SELECT version, time, author, text, comment, readonly
                     FROM wiki WHERE name=%s AND version=%s"""
            args = (name, int(version))
        else:
            sql = """SELECT version, time, author, text, comment, readonly
                     FROM wiki WHERE name=%s ORDER BY version DESC LIMIT 1"""
            args = (name,)
        for version, time, author, text, comment, readonly in \
                self.env.db_query(sql, args):
            self.version = int(version)
            self.author = author
            self.time = from_utimestamp(time)
            self.text = text
            self.comment = comment
            self.readonly = int(readonly) if readonly else 0
            break
        else:
            # No matching row: behave like a new page.
            self.version = 0
            self.text = self.comment = self.author = ''
            self.time = None
            self.readonly = 0
    # A page exists as soon as it has at least one stored version.
    exists = property(lambda self: self.version > 0)
    def delete(self, version=None, db=None):
        """Delete one or all versions of a page.

        :since 1.0: the `db` parameter is no longer needed and will be removed
        in version 1.1.1
        """
        assert self.exists, "Cannot delete non-existent page"
        with self.env.db_transaction as db:
            if version is None:
                # Delete a wiki page completely
                db("DELETE FROM wiki WHERE name=%s", (self.name,))
                self.env.log.info("Deleted page %s", self.name)
            else:
                # Delete only a specific page version
                db("DELETE FROM wiki WHERE name=%s and version=%s",
                   (self.name, version))
                self.env.log.info("Deleted version %d of page %s", version,
                                  self.name)
            if version is None or version == self.version:
                # Current version was removed: reload what is left (if any).
                self._fetch(self.name, None)
            if not self.exists:
                # Invalidate page name cache
                del WikiSystem(self.env).pages
                # Delete orphaned attachments
                from trac.attachment import Attachment
                Attachment.delete_all(self.env, 'wiki', self.name)
        # Let change listeners know about the deletion
        if not self.exists:
            for listener in WikiSystem(self.env).change_listeners:
                listener.wiki_page_deleted(self)
            ResourceSystem(self.env).resource_deleted(self)
        else:
            for listener in WikiSystem(self.env).change_listeners:
                if hasattr(listener, 'wiki_page_version_deleted'):
                    listener.wiki_page_version_deleted(self)
            ResourceSystem(self.env).resource_version_deleted(self)
    def save(self, author, comment, remote_addr, t=None, db=None):
        """Save a new version of a page.

        :since 1.0: the `db` parameter is no longer needed and will be removed
        in version 1.1.1
        """
        if not validate_page_name(self.name):
            raise TracError(_("Invalid Wiki page name '%(name)s'",
                              name=self.name))
        new_text = self.text != self.old_text
        if not new_text and self.readonly == self.old_readonly:
            raise TracError(_("Page not modified"))
        t = t or datetime.now(utc)
        with self.env.db_transaction as db:
            if new_text:
                db("""INSERT INTO wiki (name, version, time, author, ipnr,
                                        text, comment, readonly)
                      VALUES (%s,%s,%s,%s,%s,%s,%s,%s)
                      """, (self.name, self.version + 1, to_utimestamp(t),
                            author, remote_addr, self.text, comment,
                            self.readonly))
                self.version += 1
                self.resource = self.resource(version=self.version)
            else:
                # Only the readonly flag changed: update it in place.
                db("UPDATE wiki SET readonly=%s WHERE name=%s",
                   (self.readonly, self.name))
            if self.version == 1:
                # Invalidate page name cache
                del WikiSystem(self.env).pages
        self.author = author
        self.comment = comment
        self.time = t
        for listener in WikiSystem(self.env).change_listeners:
            if self.version == 1:
                listener.wiki_page_added(self)
            else:
                listener.wiki_page_changed(self, self.version, t, comment,
                                           author, remote_addr)
        # Metadata passed along to the resource change notifications.
        context=dict(
            version=self.version,
            time=t,
            comment=comment,
            author=author,
            remote_addr=remote_addr)
        if self.version == 1:
            ResourceSystem(self.env).resource_created(self, context)
        else:
            old_values = dict()
            if self.readonly != self.old_readonly:
                old_values["readonly"] = self.old_readonly
            if self.text != self.old_text:
                old_values["text"] = self.old_text
            ResourceSystem(self.env).resource_changed(
                self,
                old_values,
                context)
        self.old_readonly = self.readonly
        self.old_text = self.text
    def rename(self, new_name):
        """Rename wiki page in-place, keeping the history intact.

        Renaming a page this way will eventually leave dangling references
        to the old page - which literally doesn't exist anymore.
        """
        assert self.exists, "Cannot rename non-existent page"
        if not validate_page_name(new_name):
            raise TracError(_("Invalid Wiki page name '%(name)s'",
                              name=new_name))
        old_name = self.name
        with self.env.db_transaction as db:
            new_page = WikiPage(self.env, new_name)
            if new_page.exists:
                raise TracError(_("Can't rename to existing %(name)s page.",
                                  name=new_name))
            db("UPDATE wiki SET name=%s WHERE name=%s", (new_name, old_name))
            # Invalidate page name cache
            del WikiSystem(self.env).pages
            # Reparent attachments
            from trac.attachment import Attachment
            Attachment.reparent_all(self.env, 'wiki', old_name, 'wiki',
                                    new_name)
        self.name = new_name
        self.env.log.info('Renamed page %s to %s', old_name, new_name)
        for listener in WikiSystem(self.env).change_listeners:
            if hasattr(listener, 'wiki_page_renamed'):
                listener.wiki_page_renamed(self, old_name)
        ResourceSystem(self.env).resource_changed(
            self,
            dict(name=old_name)
        )
    def get_history(self, db=None):
        """Retrieve the edit history of a wiki page.

        :since 1.0: the `db` parameter is no longer needed and will be removed
        in version 1.1.1
        """
        for version, ts, author, comment, ipnr in self.env.db_query("""
                SELECT version, time, author, comment, ipnr FROM wiki
                WHERE name=%s AND version<=%s ORDER BY version DESC
                """, (self.name, self.version)):
            yield version, from_utimestamp(ts), author, comment, ipnr
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/wiki/intertrac.py | trac/trac/wiki/intertrac.py | # -*- coding: utf-8 -*-
#
# Copyright (C) 2005-2009 Edgewall Software
# Copyright (C) 2005-2006 Christian Boos <cboos@edgewall.org>
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://trac.edgewall.org/wiki/TracLicense.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://trac.edgewall.org/log/.
#
# Author: Christian Boos <cboos@edgewall.org>
import re
from genshi.builder import Element, Fragment, tag
from trac.config import ConfigSection
from trac.core import *
from trac.perm import PermissionError
from trac.util.html import find_element
from trac.util.translation import _, N_
from trac.web.api import IRequestHandler
from trac.wiki.api import IWikiMacroProvider
from trac.wiki.formatter import extract_link
class InterTracDispatcher(Component):
    """InterTrac dispatcher.

    Handles `/intertrac/<link>` requests by resolving the link locally and
    redirecting, and provides the `[[InterTrac]]` macro.
    """
    implements(IRequestHandler, IWikiMacroProvider)
    intertrac_section = ConfigSection('intertrac',
        """This section configures InterTrac prefixes. Options in this section
        whose name contain a "." define aspects of the InterTrac prefix
        corresponding to the option name up to the ".". Options whose name
        don't contain a "." define an alias.
        The `.url` is mandatory and is used for locating the other Trac.
        This can be a relative URL in case that Trac environment is located
        on the same server.
        The `.title` information is used for providing a useful tooltip when
        moving the cursor over an InterTrac link.
        The `.compat` option can be used to activate or disable a
        ''compatibility'' mode:
        * If the targeted Trac is running a version below
          [trac:milestone:0.10 0.10] ([trac:r3526 r3526] to be precise), then
          it doesn't know how to dispatch an InterTrac link, and it's up to
          the local Trac to prepare the correct link. Not all links will work
          that way, but the most common do. This is called the compatibility
          mode, and is `true` by default.
        * If you know that the remote Trac knows how to dispatch InterTrac
          links, you can explicitly disable this compatibility mode and then
          ''any'' TracLinks can become InterTrac links.
        Example configuration:
        {{{
        [intertrac]
        # -- Example of setting up an alias:
        t = trac
        # -- Link to an external Trac:
        trac.title = Edgewall's Trac for Trac
        trac.url = http://trac.edgewall.org
        }}}
        """)
    # IRequestHandler methods
    def match_request(self, req):
        match = re.match(r'^/intertrac/(.*)', req.path_info)
        if match:
            if match.group(1):
                req.args['link'] = match.group(1)
            return True
    def process_request(self, req):
        link = req.args.get('link', '')
        parts = link.split(':', 1)
        if len(parts) > 1:
            resolver, target = parts
            # Quote the target unless it is already quoted.
            if target[:1] + target[-1:] not in ('""', "''"):
                link = '%s:"%s"' % (resolver, target)
        from trac.web.chrome import web_context
        link_frag = extract_link(self.env, web_context(req), link)
        if isinstance(link_frag, (Element, Fragment)):
            elt = find_element(link_frag, 'href')
            if elt is None:  # most probably no permissions to view
                raise PermissionError(_("Can't view %(link)s:", link=link))
            href = elt.attrib.get('href')
        else:
            # Not resolvable as a link: fall back to a plain local href.
            href = req.href(link.rstrip(':'))
        req.redirect(href)
    # IWikiMacroProvider methods
    def get_macros(self):
        yield 'InterTrac'
    def get_macro_description(self, name):
        return 'messages', N_("Provide a list of known InterTrac prefixes.")
    def expand_macro(self, formatter, name, content):
        # Group "<prefix>.<attribute>" options per prefix; bare options
        # are aliases to another prefix.
        intertracs = {}
        for key, value in self.intertrac_section.options():
            idx = key.rfind('.')
            if idx > 0:  # 0 itself doesn't help much: .xxx = ...
                prefix, attribute = key[:idx], key[idx+1:]
                intertrac = intertracs.setdefault(prefix, {})
                intertrac[attribute] = value
            else:
                intertracs[key] = value  # alias
        if 'trac' not in intertracs:
            # The "trac" prefix is always available as a default.
            intertracs['trac'] = {'title': _('The Trac Project'),
                                  'url': 'http://trac.edgewall.org'}
        def generate_prefix(prefix):
            # Yield one table row per prefix (alias or full definition).
            intertrac = intertracs[prefix]
            if isinstance(intertrac, basestring):
                yield tag.tr(tag.td(tag.b(prefix)),
                             tag.td('Alias for ', tag.b(intertrac)))
            else:
                url = intertrac.get('url', '')
                if url:
                    title = intertrac.get('title', url)
                    yield tag.tr(tag.td(tag.a(tag.b(prefix),
                                              href=url + '/timeline')),
                                 tag.td(tag.a(title, href=url)))
        return tag.table(class_="wiki intertrac")(
            tag.tr(tag.th(tag.em('Prefix')), tag.th(tag.em('Trac Site'))),
            [generate_prefix(p) for p in sorted(intertracs.keys())])
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/wiki/formatter.py | trac/trac/wiki/formatter.py | # -*- coding: utf-8 -*-
#
# Copyright (C) 2003-2009 Edgewall Software
# Copyright (C) 2003-2005 Jonas Borgström <jonas@edgewall.com>
# Copyright (C) 2004-2005 Christopher Lenz <cmlenz@gmx.de>
# Copyright (C) 2005-2007 Christian Boos <cboos@edgewall.org>
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://trac.edgewall.org/wiki/TracLicense.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://trac.edgewall.org/log/.
#
# Author: Jonas Borgström <jonas@edgewall.com>
# Christopher Lenz <cmlenz@gmx.de>
# Christian Boos <cboos@edgewall.org>
import re
import os
from StringIO import StringIO
from genshi.builder import tag, Element
from genshi.core import Stream, Markup, escape
from genshi.input import HTMLParser, ParseError
from genshi.util import plaintext
from trac.core import *
from trac.mimeview import *
from trac.resource import get_relative_resource, get_resource_url
from trac.util import arity
from trac.util.text import exception_to_unicode, shorten_line, to_unicode, \
unicode_quote, unicode_quote_plus, unquote_label
from trac.util.html import TracHTMLSanitizer
from trac.util.translation import _
from trac.wiki.api import WikiSystem, parse_args
from trac.wiki.parser import WikiParser, parse_processor_args
# Names exported as the public API of this module.
__all__ = ['wiki_to_html', 'wiki_to_oneliner', 'wiki_to_outline',
           'Formatter', 'format_to', 'format_to_html', 'format_to_oneliner',
           'extract_link', 'split_url_into_path_query_fragment',
           'concat_path_query_fragment']
def system_message(msg, text=None):
    """Render `msg` as a "system-message" div, optionally followed by the
    verbatim detail `text` in a <pre> block."""
    detail = text and tag.pre(text)
    return tag.div(tag.strong(msg), detail, class_="system-message")
def split_url_into_path_query_fragment(target):
    """Split a target along `?` and `#` in `(path, query, fragment)`.

    The fragment starts at the first `#`; the query at the first `?`
    occurring before it. Query and fragment keep their leading separator,
    or are empty strings when absent.

    >>> split_url_into_path_query_fragment('http://path?a=1&b=2#frag?ment')
    ('http://path', '?a=1&b=2', '#frag?ment')

    >>> split_url_into_path_query_fragment('http://path#frag?ment')
    ('http://path', '', '#frag?ment')

    >>> split_url_into_path_query_fragment('http://path?a=1&b=2')
    ('http://path', '?a=1&b=2', '')

    >>> split_url_into_path_query_fragment('http://path')
    ('http://path', '', '')
    """
    path, hash_sep, frag = target.partition('#')
    fragment = hash_sep + frag if hash_sep else ''
    path, q_sep, q = path.partition('?')
    query = q_sep + q if q_sep else ''
    return (path, query, fragment)
def concat_path_query_fragment(path, query, fragment=None):
    """Assemble `path`, `query` and `fragment` into a proper URL.

    Can be used to re-assemble an URL decomposed using
    `split_url_into_path_query_fragment` after modification.

    >>> concat_path_query_fragment('/wiki/page', '?version=1')
    '/wiki/page?version=1'
    >>> concat_path_query_fragment('/wiki/page#a', '?version=1', '#b')
    '/wiki/page?version=1#b'
    >>> concat_path_query_fragment('/wiki/page?version=1#a', '?format=txt')
    '/wiki/page?version=1&format=txt#a'
    >>> concat_path_query_fragment('/wiki/page?version=1', '&format=txt')
    '/wiki/page?version=1&format=txt'
    >>> concat_path_query_fragment('/wiki/page?version=1', 'format=txt')
    '/wiki/page?version=1&format=txt'
    >>> concat_path_query_fragment('/wiki/page?version=1#a', '?format=txt', '#')
    '/wiki/page?version=1&format=txt'
    """
    base, existing_query, existing_fragment = \
        split_url_into_path_query_fragment(path)
    if query:
        # append to an existing query string, otherwise start one
        separator = '&' if existing_query else '?'
        existing_query += separator + query.lstrip('?&')
    if fragment:
        existing_fragment = fragment
    if existing_fragment == '#':
        # a lone '#' explicitly removes the fragment
        existing_fragment = ''
    return base + existing_query + existing_fragment
def _markup_to_unicode(markup):
    """Render a Genshi `Element` or `Stream` to unicode markup.

    Non-Genshi values are simply converted with `to_unicode`.
    """
    if isinstance(markup, Element):
        markup = markup.generate().render('xhtml', encoding=None,
                                          strip_whitespace=False)
    elif isinstance(markup, Stream):
        markup = markup.render('xhtml', encoding=None,
                               strip_whitespace=False)
    return to_unicode(markup)
class ProcessorError(TracError):
    """Error raised when a wiki processor cannot handle its content."""
    pass
class WikiProcessor(object):
    """Processor for `{{{#!name ...}}}` blocks and `[[name(...)]]` macros.

    The `name` is resolved, in order, to a builtin processor, a wiki
    macro provider or a Mimeview renderer; `process()` then delegates
    rendering of the content to whatever was found.
    """

    # matches the markup rendered for a code block (see `ensure_inline`)
    _code_block_re = re.compile('^<div(?:\s+class="([^"]+)")?>(.*)</div>$')
    # detects block-level (X)HTML which can't be nested inside a paragraph
    _block_elem_re = re.compile(r'^\s*<(?:div|table)(?:\s+[^>]+)?>',
                                re.I | re.M)

    def __init__(self, formatter, name, args=None):
        """Find the processor by name

        :param formatter: the formatter embedding a call for this processor
        :param name: the name of the processor
        :param args: extra parameters for the processor

        (since 0.11)
        """
        self.formatter = formatter
        self.env = formatter.env
        self.name = name
        self.args = args
        self.error = None          # set if no processor could be resolved
        self.macro_provider = None
        # FIXME: move these tables outside of __init__
        builtin_processors = {'html': self._html_processor,
                              'htmlcomment': self._htmlcomment_processor,
                              'default': self._default_processor,
                              'comment': self._comment_processor,
                              'div': self._div_processor,
                              'rtl': self._rtl_processor,
                              'span': self._span_processor,
                              'Span': self._span_processor,
                              'td': self._td_processor,
                              'th': self._th_processor,
                              'tr': self._tr_processor,
                              'table': self._table_processor,
                              }
        # `inline_check` is either a boolean or a callable taking the
        # processed text (see `is_inline`)
        self.inline_check = {'html': self._html_is_inline,
                             'htmlcomment': True, 'comment': True,
                             'span': True, 'Span': True,
                             }.get(name)
        self._sanitizer = TracHTMLSanitizer(formatter.wiki.safe_schemes)
        self.processor = builtin_processors.get(name)
        if not self.processor:
            # Find a matching wiki macro
            for macro_provider in WikiSystem(self.env).macro_providers:
                for macro_name in macro_provider.get_macros() or []:
                    if self.name == macro_name:
                        if hasattr(macro_provider, 'expand_macro'):
                            self.processor = self._macro_processor
                        else:
                            self.processor = self._legacy_macro_processor
                        self.macro_provider = macro_provider
                        self.inline_check = getattr(macro_provider, 'is_inline',
                                                    False)
                        break
        if not self.processor:
            # Find a matching mimeview renderer
            from trac.mimeview.api import Mimeview
            mimeview = Mimeview(formatter.env)
            for renderer in mimeview.renderers:
                if renderer.get_quality_ratio(self.name) > 1:
                    self.processor = self._mimeview_processor
                    break
            if not self.processor:
                # maybe `name` is a file extension or mimetype alias
                mimetype = mimeview.get_mimetype(self.name)
                if mimetype:
                    self.name = mimetype
                    self.processor = self._mimeview_processor
        if not self.processor:
            # fall back on the preformatted-text processor, with an error
            self.processor = self._default_processor
            self.error = "No macro or processor named '%s' found" % name

    # inline checks

    def _html_is_inline(self, text):
        """Return whether `text` starts with an inline-level HTML tag."""
        if text:
            tag = text[1:].lstrip()
            idx = tag.find(' ')
            if idx > -1:
                tag = tag[:idx]
            return tag.lower() in ('a', 'span', 'bdo', 'img',
                                   'big', 'small', 'font',
                                   'tt', 'i', 'b', 'u', 's', 'strike',
                                   'em', 'strong', 'dfn', 'code', 'q',
                                   'samp', 'kbd', 'var', 'cite', 'abbr',
                                   'acronym', 'sub', 'sup')

    # builtin processors

    def _comment_processor(self, text):
        """`#!comment`: render nothing."""
        return ''

    def _default_processor(self, text):
        """Fallback: render the content as a preformatted block."""
        return tag.pre(text, class_="wiki")

    def _html_processor(self, text):
        """`#!html`: pass raw HTML through, sanitized unless unsafe
        content rendering is enabled.
        """
        if WikiSystem(self.env).render_unsafe_content:
            return Markup(text)
        try:
            stream = Stream(HTMLParser(StringIO(text)))
            return (stream | self._sanitizer).render('xhtml', encoding=None)
        except ParseError, e:
            self.env.log.warn(e)
            line = unicode(text).splitlines()[e.lineno - 1].strip()
            return system_message(_('HTML parsing error: %(message)s',
                                    message=escape(e.msg)), line)

    def _htmlcomment_processor(self, text):
        """`#!htmlcomment`: emit the content inside an HTML comment."""
        if "--" in text:
            # "--" would prematurely terminate the HTML comment
            return system_message(_('Error: Forbidden character sequence '
                                    '"--" in htmlcomment wiki code block'))
        return Markup('<!--\n%s-->\n' % text)

    def _elt_processor(self, eltname, format_to, text):
        """Build an `eltname` element with `self.args` as attributes
        (sanitized when needed) and `text` formatted by `format_to`.
        """
        # Note: as long as _processor_param_re is not re.UNICODE, **args is OK.
        # Also, parse_args is using strict mode when processing [[span(...)]].
        elt = getattr(tag, eltname)(**(self.args or {}))
        if not WikiSystem(self.env).render_unsafe_content:
            sanitized_elt = getattr(tag, eltname)
            for (k, data, pos) in (Stream(elt) | self._sanitizer):
                sanitized_elt.attrib = data[1]
                break # only look at START (elt,attrs)
            elt = sanitized_elt
        elt.append(format_to(self.env, self.formatter.context, text))
        return elt

    def _div_processor(self, text):
        """`#!div`: wrap wiki-formatted content in a `<div>`."""
        if not self.args:
            self.args = {}
        self.args.setdefault('class', 'wikipage')
        return self._elt_processor('div', format_to_html, text)

    def _rtl_processor(self, text):
        """`#!rtl`: wrap wiki-formatted content in a right-to-left `<div>`."""
        if not self.args:
            self.args = {}
        self.args['class'] = ('rtl ' + self.args.get('class', '')).rstrip()
        return self._elt_processor('div', format_to_html, text)

    def _span_processor(self, text):
        """`#!span` / `[[span(...)]]`: wrap one-liner content in a `<span>`."""
        if self.args is None:
            # invoked as a macro: both args and content are in `text`
            args, self.args = parse_args(text, strict=True)
            text = ', '.join(args)
        return self._elt_processor('span', format_to_oneliner, text)

    def _td_processor(self, text):
        """`#!td`: a `<td>` table cell."""
        return self._tablecell_processor('td', text)

    def _th_processor(self, text):
        """`#!th`: a `<th>` table header cell."""
        return self._tablecell_processor('th', text)

    def _tr_processor(self, text):
        """`#!tr`: a `<tr>` table row whose content must be table cells."""
        try:
            elt = self._elt_processor('tr', self._format_row, text)
            self.formatter.open_table()
            return elt
        except ProcessorError, e:
            return system_message(e)

    def _table_processor(self, text):
        """`#!table`: a `<table>` whose content must be rows and cells."""
        if not self.args:
            self.args = {}
        self.args.setdefault('class', 'wiki')
        try:
            return self._elt_processor('table', self._format_table, text)
        except ProcessorError, e:
            return system_message(e)

    def _tablecell_processor(self, eltname, text):
        # a cell implies being inside a row of the current table
        self.formatter.open_table_row()
        return self._elt_processor(eltname, format_to_html, text)

    _has_multiple_tables_re = re.compile(r"</table>.*?<table",
                                         re.MULTILINE | re.DOTALL)
    # captures (1) the row content incl. <tr> and (2) the bare cell content
    _inner_table_re = re.compile(r"""\s*
        <table[^>]*>\s*
        ((?:<tr[^>]*>)?
        (.*?)
        (?:</tr>)?)\s*
        </table>\s*$
        """, re.MULTILINE | re.DOTALL | re.VERBOSE)

    # Note: the need for "parsing" that crude way the formatted content
    #       will go away as soon as we have a WikiDOM to manipulate...

    def _parse_inner_table(self, text):
        """Extract the inner content of the single table found in `text`.

        :raises ProcessorError: if there is no table, or more than one
        """
        if self._has_multiple_tables_re.search(text):
            raise ProcessorError(_("!#%(name)s must contain at most one table",
                                   name=self.name))
        match = self._inner_table_re.match(text)
        if not match:
            raise ProcessorError(_("!#%(name)s must contain at least one table"
                                   " cell (and table cells only)",
                                   name=self.name))
        return Markup(match.group(1 if self.name == 'table' else 2))

    def _format_row(self, env, context, text):
        """Format `text` and reduce it to the inner content of a table row."""
        if text:
            out = StringIO()
            Formatter(env, context).format(text, out)
            text = self._parse_inner_table(out.getvalue())
        return text

    def _format_table(self, env, context, text):
        """Format `text` and reduce it to the inner content of a table."""
        if text:
            out = StringIO()
            Formatter(env, context).format(text, out)
            text = self._parse_inner_table(out.getvalue())
        return text

    # generic processors

    def _legacy_macro_processor(self, text): # TODO: remove in 0.12
        """Invoke a pre-0.11 macro provider (`render_macro`)."""
        self.env.log.warning('Executing pre-0.11 Wiki macro %s by provider %s'
                             % (self.name, self.macro_provider))
        return self.macro_provider.render_macro(self.formatter.req, self.name,
                                                text)

    def _macro_processor(self, text):
        """Invoke a macro provider's `expand_macro`."""
        self.env.log.debug('Executing Wiki macro %s by provider %s'
                           % (self.name, self.macro_provider))
        # the newer 4-argument signature also receives the processor args
        if arity(self.macro_provider.expand_macro) == 4:
            return self.macro_provider.expand_macro(self.formatter, self.name,
                                                    text, self.args)
        else:
            return self.macro_provider.expand_macro(self.formatter, self.name,
                                                    text)

    def _mimeview_processor(self, text):
        """Render `text` with the Mimeview renderer matching `self.name`."""
        return Mimeview(self.env).render(self.formatter.context,
                                         self.name, text)
    # TODO: use convert('text/html') instead of render

    def process(self, text, in_paragraph=False):
        """Process `text` with the resolved processor.

        A processor resolution error is reported as a system message
        instead of being raised.
        """
        if self.error:
            text = system_message(tag('Error: Failed to load processor ',
                                      tag.code(self.name)),
                                  self.error)
        else:
            text = self.processor(text)
        return text or ''

    def is_inline(self, text):
        """Whether the processed `text` may be rendered inside a paragraph."""
        if callable(self.inline_check):
            return self.inline_check(text)
        else:
            return self.inline_check

    def ensure_inline(self, text):
        """Rework block-level `text` so it can live inside a paragraph.

        Code blocks are rewrapped in a `<span class="code-block">`;
        other block-level content interrupts the current paragraph.
        """
        content_for_span = None
        interrupt_paragraph = False
        if isinstance(text, Element):
            tagname = text.tag.lower()
            if tagname == 'div':
                class_ = text.attrib.get('class', '')
                if class_ and 'code' in class_:
                    content_for_span = text.children
                else:
                    interrupt_paragraph = True
            elif tagname == 'table':
                interrupt_paragraph = True
        else:
            # FIXME: do something smarter for Streams
            text = _markup_to_unicode(text)
            match = re.match(self._code_block_re, text)
            if match:
                if match.group(1) and 'code' in match.group(1):
                    content_for_span = match.group(2)
                else:
                    interrupt_paragraph = True
            elif re.match(self._block_elem_re, text):
                interrupt_paragraph = True
        if content_for_span:
            text = tag.span(class_='code-block')(*content_for_span)
        elif interrupt_paragraph:
            text = "</p>%s<p>" % _markup_to_unicode(text)
        return text
class Formatter(object):
"""Base Wiki formatter.
Parses and formats wiki text, in a given `Context`.
"""
flavor = 'default'
# 0.10 compatibility
INTERTRAC_SCHEME = WikiParser.INTERTRAC_SCHEME
QUOTED_STRING = WikiParser.QUOTED_STRING
LINK_SCHEME = WikiParser.LINK_SCHEME
    def __init__(self, env, context):
        """Note: `req` is still temporarily used."""
        self.env = env
        # work on a child context so hint changes don't leak to the caller
        self.context = context.child()
        self.context.set_hints(disable_warnings=True)
        self.req = context.req
        self.href = context.href
        self.resource = context.resource
        self.perm = context.perm
        self.wiki = WikiSystem(self.env)
        self.wikiparser = WikiParser(self.env)
        self._anchors = {}         # anchors generated so far (for headings)
        self._open_tags = []       # stack of currently open inline style tags
        self._safe_schemes = None  # None means every scheme is allowed
        if not self.wiki.render_unsafe_content:
            self._safe_schemes = set(self.wiki.safe_schemes)
    def split_link(self, target):
        """Split a link `target` into `(path, query, fragment)` parts."""
        return split_url_into_path_query_fragment(target)
# -- Pre- IWikiSyntaxProvider rules (Font styles)
_indirect_tags = {
'MM_BOLD': ('<strong>', '</strong>'),
'WC_BOLD': ('<strong>', '</strong>'),
'MM_ITALIC': ('<em>', '</em>'),
'WC_ITALIC': ('<em>', '</em>'),
'MM_UNDERLINE': ('<span class="underline">', '</span>'),
'MM_STRIKE': ('<del>', '</del>'),
'MM_SUBSCRIPT': ('<sub>', '</sub>'),
'MM_SUPERSCRIPT': ('<sup>', '</sup>'),
}
def _get_open_tag(self, tag):
"""Retrieve opening tag for direct or indirect `tag`."""
if not isinstance(tag, tuple):
tag = self._indirect_tags[tag]
return tag[0]
def _get_close_tag(self, tag):
"""Retrieve closing tag for direct or indirect `tag`."""
if not isinstance(tag, tuple):
tag = self._indirect_tags[tag]
return tag[1]
    def tag_open_p(self, tag):
        """Do we currently have any open tag with `tag` as end-tag?

        `tag` is either an indirect tag name (a `_indirect_tags` key)
        or a direct `(open, close)` pair.
        """
        return tag in self._open_tags
def flush_tags(self):
while self._open_tags != []:
self.out.write(self._get_close_tag(self._open_tags.pop()))
def open_tag(self, tag_open, tag_close=None):
"""Open an inline style tag.
If `tag_close` is not specified, `tag_open` is an indirect tag (0.12)
"""
if tag_close:
self._open_tags.append((tag_open, tag_close))
else:
self._open_tags.append(tag_open)
tag_open = self._get_open_tag(tag_open)
return tag_open
    def close_tag(self, open_tag, close_tag=None):
        """Close an inline style tag.

        If `close_tag` is not specified, it's an indirect tag (0.12)
        """
        tmp = ''
        # Close tags from the top of the stack down to the matching one,
        # then reopen the ones closed in between to keep proper nesting.
        for i in xrange(len(self._open_tags) - 1, -1, -1):
            tag = self._open_tags[i]
            tmp += self._get_close_tag(tag)
            # indirect tags match by name, direct ones by (open, close) pair
            if (open_tag == tag,
                (open_tag, close_tag) == tag)[bool(close_tag)]:
                del self._open_tags[i]
                for j in xrange(i, len(self._open_tags)):
                    tmp += self._get_open_tag(self._open_tags[j])
                break
        return tmp
def _indirect_tag_handler(self, match, tag):
"""Handle binary inline style tags (indirect way, 0.12)"""
if self.tag_open_p(tag):
return self.close_tag(tag)
else:
return self.open_tag(tag)
    def _bolditalic_formatter(self, match, fullmatch):
        """Handle the bold-italic (`'''''`) marker.

        Close italic first, if open, so bold and italic stay properly
        nested; toggle bold; then reopen italic if it wasn't open.
        """
        italic_open = self.tag_open_p('MM_ITALIC')
        tmp = ''
        if italic_open:
            tmp += self._get_close_tag('MM_ITALIC')
            self.close_tag('MM_ITALIC')
        tmp += self._bold_formatter(match, fullmatch)
        if not italic_open:
            tmp += self.open_tag('MM_ITALIC')
        return tmp
    # The following formatters simply toggle the corresponding indirect
    # inline style tag (MM_* for MoinMoin markup, WC_* for WikiCreole).

    def _bold_formatter(self, match, fullmatch):
        return self._indirect_tag_handler(match, 'MM_BOLD')

    def _bold_wc_formatter(self, match, fullmatch):
        return self._indirect_tag_handler(match, 'WC_BOLD')

    def _italic_formatter(self, match, fullmatch):
        return self._indirect_tag_handler(match, 'MM_ITALIC')

    def _italic_wc_formatter(self, match, fullmatch):
        return self._indirect_tag_handler(match, 'WC_ITALIC')

    def _underline_formatter(self, match, fullmatch):
        return self._indirect_tag_handler(match, 'MM_UNDERLINE')

    def _strike_formatter(self, match, fullmatch):
        return self._indirect_tag_handler(match, 'MM_STRIKE')

    def _subscript_formatter(self, match, fullmatch):
        return self._indirect_tag_handler(match, 'MM_SUBSCRIPT')

    def _superscript_formatter(self, match, fullmatch):
        return self._indirect_tag_handler(match, 'MM_SUPERSCRIPT')

    # Monospace text: wrap the captured content in a <tt> element.

    def _inlinecode_formatter(self, match, fullmatch):
        return tag.tt(fullmatch.group('inline'))

    def _inlinecode2_formatter(self, match, fullmatch):
        return tag.tt(fullmatch.group('inline2'))
# pre-0.12 public API (no longer used by Trac itself but kept for plugins)
    def simple_tag_handler(self, match, open_tag, close_tag):
        """Generic handler for simple binary style tags

        Kept for plugins written against the pre-0.12 API.
        """
        if self.tag_open_p((open_tag, close_tag)):
            return self.close_tag(open_tag, close_tag)
        else:
            self.open_tag(open_tag, close_tag)
        return open_tag
# -- Post- IWikiSyntaxProvider rules
    # WikiCreole line breaks
    def _linebreak_wc_formatter(self, match, fullmatch):
        """Render a WikiCreole forced line break."""
        return '<br />'
# E-mails
    def _email_formatter(self, match, fullmatch):
        """Render an e-mail address, obfuscated if so configured."""
        from trac.web.chrome import Chrome
        omatch = Chrome(self.env).format_emails(self.context, match)
        if omatch == match: # not obfuscated, make a link
            return self._make_mail_link('mailto:'+match, match)
        else:
            # obfuscated form: render as plain text
            return omatch
# HTML escape of &, < and >
def _htmlescape_formatter(self, match, fullmatch):
return "&" if match == "&" else "<" if match == "<" else ">"
# Short form (shref) and long form (lhref) of TracLinks
def _shrefbr_formatter(self, match, fullmatch):
ns = fullmatch.group('snsbr')
target = unquote_label(fullmatch.group('stgtbr'))
match = match[1:-1]
return '<%s>' % \
self._make_link(ns, target, match, match, fullmatch)
    def _shref_formatter(self, match, fullmatch):
        """Render a short-form TracLink, e.g. `wiki:target`."""
        ns = fullmatch.group('sns')
        target = unquote_label(fullmatch.group('stgt'))
        return self._make_link(ns, target, match, match, fullmatch)
    def _lhref_formatter(self, match, fullmatch):
        """Render a long-form TracLink, e.g. `[ns:target label]`."""
        rel = fullmatch.group('rel')
        ns = fullmatch.group('lns')
        target = unquote_label(fullmatch.group('ltgt'))
        label = fullmatch.group('label')
        return self._make_lhref_link(match, fullmatch, rel, ns, target, label)
    def _make_lhref_link(self, match, fullmatch, rel, ns, target, label):
        """Build the link for a long-form TracLink.

        Either `rel` (a relative link) or `ns` + `target` is set.
        When `label` is missing it is derived from the target.
        """
        if not label: # e.g. `[http://target]` or `[wiki:target]`
            if target:
                if target.startswith('//'): # for `[http://target]`
                    label = ns + ':' + target # use `http://target`
                else: # for `wiki:target`
                    label = target.lstrip('/') # use only `target`
            else: # e.g. `[search:]`
                label = ns
        else:
            label = unquote_label(label)
        if rel:
            if not label:
                label = self.wiki.make_label_from_target(rel)
            path, query, fragment = self.split_link(rel)
            if path.startswith('//'):
                # server-relative link
                path = '/' + path.lstrip('/')
            elif path.startswith('/'):
                # project-relative link
                path = self.href + path
            else:
                # resolve relative to the current resource
                resource = get_relative_resource(self.resource, path)
                path = get_resource_url(self.env, resource, self.href)
                if resource.id:
                    target = concat_path_query_fragment(unicode(resource.id),
                                                        query, fragment)
                    if resource.realm == 'wiki':
                        target = '/' + target # Avoid wiki page scoping
                    return self._make_link(resource.realm, target, match,
                                           label, fullmatch)
            return tag.a(label,
                         href=concat_path_query_fragment(path, query, fragment))
        else:
            return self._make_link(ns or 'wiki', target or '', match, label,
                                   fullmatch)
    def _make_link(self, ns, target, match, label, fullmatch):
        """Render a `ns:target` TracLink through the appropriate resolver.

        Falls back on InterTrac, then InterWiki, then the escaped
        original `match` text.
        """
        # first check for an alias defined in trac.ini
        ns = self.env.config['intertrac'].get(ns, ns)
        if ns in self.wikiparser.link_resolvers:
            resolver = self.wikiparser.link_resolvers[ns]
            # 5-argument resolvers also receive the regexp match object
            if arity(resolver) == 5:
                return resolver(self, ns, target, escape(label, False),
                                fullmatch)
            else:
                return resolver(self, ns, target, escape(label, False))
        elif ns == "mailto":
            from trac.web.chrome import Chrome
            chrome = Chrome(self.env)
            if chrome.never_obfuscate_mailto:
                otarget, olabel = target, label
            else:
                otarget = chrome.format_emails(self.context, target)
                olabel = chrome.format_emails(self.context, label)
            if (otarget, olabel) == (target, label):
                # not obfuscated: make a real mailto: link
                return self._make_mail_link('mailto:'+target, label)
            else:
                # obfuscated: render as plain text
                return olabel or otarget
        elif target.startswith('//'):
            # `scheme://...` external link, only for allowed schemes
            if self._safe_schemes is None or ns in self._safe_schemes:
                return self._make_ext_link(ns + ':' + target, label)
            else:
                return escape(match)
        else:
            return self._make_intertrac_link(ns, target, label) or \
                   self._make_interwiki_link(ns, target, label) or \
                   escape(match)
    def _make_intertrac_link(self, ns, target, label):
        """Render an InterTrac link for prefix `ns`, or `None` if `ns`
        is not a known InterTrac prefix.
        """
        res = self.get_intertrac_url(ns, target)
        if res:
            return self._make_ext_link(res[0], label, res[1])
    def get_intertrac_url(self, ns, target):
        """Return `(url, title)` for the InterTrac prefix `ns`, or
        `None` if `ns` is not a configured InterTrac prefix.
        """
        intertrac = self.env.config['intertrac']
        url = intertrac.get(ns + '.url')
        if not url and ns == 'trac':
            # builtin alias for the main Trac project site
            url = 'http://trac.edgewall.org'
        if url:
            name = intertrac.get(ns + '.title', 'Trac project %s' % ns)
            compat = intertrac.getbool(ns + '.compat', 'false')
            # set `compat` default to False now that 0.10 is widely used
            # TODO: remove compatibility code completely for 1.0 release
            if compat:
                sep = target.find(':')
                if sep != -1:
                    url = '%s/%s/%s' % (url, target[:sep], target[sep + 1:])
                else:
                    url = '%s/search?q=%s' % (url, unicode_quote_plus(target))
            else:
                url = '%s/intertrac/%s' % (url, unicode_quote(target))
            if target:
                title = _('%(target)s in %(name)s', target=target, name=name)
            else:
                title = name
            return (url, title)
    def shorthand_intertrac_helper(self, ns, target, label, fullmatch):
        """Resolve a shorthand InterTrac link, or return `None`."""
        if fullmatch: # short form
            it_group = fullmatch.groupdict().get('it_' + ns)
            if it_group:
                alias = it_group.strip()
                intertrac = self.env.config['intertrac']
                # strip the alias prefix from the target
                target = '%s:%s' % (ns, target[len(it_group):])
                return self._make_intertrac_link(intertrac.get(alias, alias),
                                                 target, label) or label
    def _make_interwiki_link(self, ns, target, label):
        """Render a link for a known InterWiki prefix, or return `None`."""
        from trac.wiki.interwiki import InterWikiMap
        interwiki = InterWikiMap(self.env)
        if ns in interwiki:
            url, title = interwiki.url(ns, target)
            return self._make_ext_link(url, label, title)
    def _make_ext_link(self, url, text, title=''):
        """Render an external link, decorated with the "ext-link" icon
        unless the URL belongs to this project.
        """
        local_url = self.env.project_url or \
                    (self.req or self.env).abs_href.base
        if not url.startswith(local_url):
            return tag.a(tag.span(u'\u200b', class_="icon"), text,
                         class_="ext-link", href=url, title=title or None)
        else:
            return tag.a(text, href=url, title=title or None)
    def _make_mail_link(self, url, text, title=''):
        """Render a `mailto:` link decorated with the mail icon."""
        return tag.a(tag.span(u'\u200b', class_="icon"), text,
                     class_="mail-link", href=url, title=title or None)
# Anchors
    def _anchor_formatter(self, match, fullmatch):
        """Render an explicit anchor, with an optional formatted label."""
        anchor = fullmatch.group('anchorname')
        label = fullmatch.group('anchorlabel') or ''
        if label:
            label = format_to_oneliner(self.env, self.context, label)
        return '<span class="wikianchor" id="%s">%s</span>' % (anchor, label)
# WikiMacros or WikiCreole links
def _macrolink_formatter(self, match, fullmatch):
# check for a known [[macro]]
macro_or_link = match[2:-2]
if macro_or_link.startswith('=#'):
fullmatch = WikiParser._set_anchor_wc_re.match(macro_or_link)
if fullmatch:
return self._anchor_formatter(macro_or_link, fullmatch)
fullmatch = WikiParser._macro_re.match(macro_or_link)
if fullmatch:
name = fullmatch.group('macroname')
args = fullmatch.group('macroargs')
macro = False # not a macro
macrolist = name[-1] == '?'
if name.lower() == 'br' or name == '?':
macro = None
else:
macro = WikiProcessor(self, (name, name[:-1])[macrolist])
if macro.error:
macro = False
if macro is not False:
if macrolist:
macro = WikiProcessor(self, 'MacroList')
return self._macro_formatter(match, fullmatch, macro)
fullmatch = WikiParser._creolelink_re.match(macro_or_link)
return self._lhref_formatter(match, fullmatch)
    def _macro_formatter(self, match, fullmatch, macro, only_inline=False):
        """Expand `macro` and make sure the result can be inlined.

        NOTE(review): `only_inline` is not used in this body — presumably
        kept for interface compatibility; confirm against callers.
        """
        name = fullmatch.group('macroname')
        if name.lower() == 'br':
            return '<br />'
        if name and name[-1] == '?': # Macro?() shortcut for MacroList(Macro)
            args = name[:-1] or '*'
        else:
            args = fullmatch.group('macroargs')
        try:
            return macro.ensure_inline(macro.process(args))
        except Exception, e:
            self.env.log.error('Macro %s(%s) failed: %s' %
                    (name, args, exception_to_unicode(e, traceback=True)))
            return system_message('Error: Macro %s(%s) failed' % (name, args),
                                  e)
# Headings
    def _parse_heading(self, match, fullmatch, shorten):
        """Parse a heading match into `(depth, heading, anchor)`.

        The anchor is made unique among those seen so far; when
        `shorten` is set the heading text is also shortened.
        """
        match = match.strip()
        hdepth = fullmatch.group('hdepth')
        depth = len(hdepth)
        anchor = fullmatch.group('hanchor') or ''
        htext = fullmatch.group('htext').strip()
        if htext.endswith(hdepth):
            # strip the optional closing `=` marks
            htext = htext[:-depth]
        heading = format_to_oneliner(self.env, self.context, htext, False)
        if anchor:
            anchor = anchor[1:]
        else:
            # derive an anchor from the heading text, markup removed
            sans_markup = plaintext(heading, keeplinebreaks=False)
            anchor = WikiParser._anchor_re.sub('', sans_markup)
            if not anchor or anchor[0].isdigit() or anchor[0] in '.-':
                # an ID must start with a Name-start character in XHTML
                anchor = 'a' + anchor # keeping 'a' for backward compat
        i = 1
        anchor_base = anchor
        while anchor in self._anchors:
            # disambiguate duplicated headings by numbering them
            anchor = anchor_base + str(i)
            i += 1
        self._anchors[anchor] = True
        if shorten:
            heading = format_to_oneliner(self.env, self.context, htext, True)
        return (depth, heading, anchor)
    def _heading_formatter(self, match, fullmatch):
        """Write out an `<h1>`..`<h6>` element for a wiki heading."""
        # a heading terminates any currently open block-level construct
        self.close_table()
        self.close_paragraph()
        self.close_indentation()
        self.close_list()
        self.close_def_list()
        depth, heading, anchor = self._parse_heading(match, fullmatch, False)
        self.out.write('<h%d id="%s">%s</h%d>' %
                       (depth, anchor, heading, depth))
# Generic indentation (as defined by lists and quotes)
def _set_tab(self, depth):
"""Append a new tab if needed and truncate tabs deeper than `depth`
given: -*-----*--*---*--
setting: *
results in: -*-----*-*-------
"""
tabstops = []
for ts in self._tabstops:
if ts >= depth:
break
tabstops.append(ts)
tabstops.append(depth)
self._tabstops = tabstops
# Lists
def _list_formatter(self, match, fullmatch):
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | true |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/wiki/__init__.py | trac/trac/wiki/__init__.py | from trac.wiki.api import *
from trac.wiki.formatter import *
from trac.wiki.intertrac import *
from trac.wiki.model import *
from trac.wiki.parser import *
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/wiki/tests/wikisyntax.py | trac/trac/wiki/tests/wikisyntax.py | # -*- coding: utf-8 -*-
from datetime import datetime
import unittest
from trac.util.datefmt import utc
from trac.wiki.model import WikiPage
from trac.wiki.tests import formatter
TEST_CASES = u"""
============================== wiki: link resolver
wiki:TestPage
wiki:TestPage/
wiki:/TestPage
[wiki:/TestPage]
[wiki:/TestPage ]
[wiki:/TestPage\u200B]
[wiki:/TestPage /TestPage]
wiki:"Space 1 23"
wiki:"C'est l'\xe9t\xe9"
wiki:MissingPage
wiki:12
wiki:abc
------------------------------
<p>
<a class="wiki" href="/wiki/TestPage">wiki:TestPage</a>
<a class="wiki" href="/wiki/TestPage">wiki:TestPage/</a>
<a class="wiki" href="/wiki/TestPage">wiki:/TestPage</a>
<a class="wiki" href="/wiki/TestPage">TestPage</a>
<a class="wiki" href="/wiki/TestPage">TestPage</a>
<a class="wiki" href="/wiki/TestPage">TestPage</a>
<a class="wiki" href="/wiki/TestPage">/TestPage</a>
<a class="wiki" href="/wiki/Space%201%2023">wiki:"Space 1 23"</a>
<a class="wiki" href="/wiki/C'est%20l'%C3%A9t%C3%A9">wiki:"C'est l'\xe9t\xe9"</a>
<a class="missing wiki" href="/wiki/MissingPage" rel="nofollow">wiki:MissingPage?</a>
<a class="missing wiki" href="/wiki/12" rel="nofollow">wiki:12?</a>
<a class="missing wiki" href="/wiki/abc" rel="nofollow">wiki:abc?</a>
</p>
------------------------------
============================== wiki: link resolver + query and fragment
wiki:TestPage?format=txt
wiki:TestPage/?version=12
wiki:TestPage/?action=diff&version=12
wiki:"Space 1 23#heading"
------------------------------
<p>
<a class="wiki" href="/wiki/TestPage?format=txt">wiki:TestPage?format=txt</a>
<a class="wiki" href="/wiki/TestPage?version=12">wiki:TestPage/?version=12</a>
<a class="wiki" href="/wiki/TestPage?action=diff&version=12">wiki:TestPage/?action=diff&version=12</a>
<a class="wiki" href="/wiki/Space%201%2023#heading">wiki:"Space 1 23#heading"</a>
</p>
------------------------------
============================== WikiPageNames conformance
CamelCase AlabamA ABc AlaBamA FooBar
------------------------------
<p>
<a class="missing wiki" href="/wiki/CamelCase" rel="nofollow">CamelCase?</a> AlabamA ABc AlaBamA <a class="missing wiki" href="/wiki/FooBar" rel="nofollow">FooBar?</a>
</p>
------------------------------
============================== WikiPageNames conformance (unicode)
SmÅogstore should produce a link
and so should wiki:ÜberflüssigkeitsTheorie
------------------------------
<p>
<a class="missing wiki" href="/wiki/Sm%C3%85ogstore" rel="nofollow">SmÅogstore?</a> should produce a link
and so should <a class="missing wiki" href="/wiki/%C3%9Cberfl%C3%BCssigkeitsTheorie" rel="nofollow">wiki:ÜberflüssigkeitsTheorie?</a>
</p>
------------------------------
============================== More WikiPageNames conformance
CamelCase,CamelCase.CamelCase: CamelCase
But not CamelCase2
nor CamelCase_
------------------------------
<p>
<a class="missing wiki" href="/wiki/CamelCase" rel="nofollow">CamelCase?</a>,<a class="missing wiki" href="/wiki/CamelCase" rel="nofollow">CamelCase?</a>.<a class="missing wiki" href="/wiki/CamelCase" rel="nofollow">CamelCase?</a>: <a class="missing wiki" href="/wiki/CamelCase" rel="nofollow">CamelCase?</a>
But not CamelCase2
nor CamelCase_
</p>
------------------------------
============================== Escaping WikiPageNames
!CamelCase
------------------------------
<p>
CamelCase
</p>
------------------------------
============================== WikiPageNames endings
foo (FooBar )
foo FooBar: something
foo FooBar.
FooBar, foo
foo FooBar;
foo FooBar!
foo FooBar?
foo (FooBar)
foo {FooBar}
foo 'FooBar'
foo "FooBar"
foo [FooBar]
------------------------------
<p>
foo (<a class="missing wiki" href="/wiki/FooBar" rel="nofollow">FooBar?</a> )
foo <a class="missing wiki" href="/wiki/FooBar" rel="nofollow">FooBar?</a>: something
foo <a class="missing wiki" href="/wiki/FooBar" rel="nofollow">FooBar?</a>.
<a class="missing wiki" href="/wiki/FooBar" rel="nofollow">FooBar?</a>, foo
foo <a class="missing wiki" href="/wiki/FooBar" rel="nofollow">FooBar?</a>;
foo <a class="missing wiki" href="/wiki/FooBar" rel="nofollow">FooBar?</a>!
foo <a class="missing wiki" href="/wiki/FooBar" rel="nofollow">FooBar?</a>?
foo (<a class="missing wiki" href="/wiki/FooBar" rel="nofollow">FooBar?</a>)
foo {<a class="missing wiki" href="/wiki/FooBar" rel="nofollow">FooBar?</a>}
foo '<a class="missing wiki" href="/wiki/FooBar" rel="nofollow">FooBar?</a>'
foo "<a class="missing wiki" href="/wiki/FooBar" rel="nofollow">FooBar?</a>"
foo [<a class="missing wiki" href="/wiki/FooBar" rel="nofollow">FooBar?</a>]
</p>
------------------------------
============================== WikiPageNames counter examples
A0B1, ST62T53C6, IR32V1H000
------------------------------
<p>
A0B1, ST62T53C6, IR32V1H000
</p>
------------------------------
============================== WikiPageNames with fragment identifier
SandBox#heading-fixed-id
wiki:TracSubversion#TracandSubversion1.3.1. etc.
TracSubversion#TracandSubversion1.3.1. etc.
------------------------------
<p>
<a class="missing wiki" href="/wiki/SandBox#heading-fixed-id" rel="nofollow">SandBox#heading-fixed-id?</a>
</p>
<p>
<a class="missing wiki" href="/wiki/TracSubversion#TracandSubversion1.3.1" rel="nofollow">wiki:TracSubversion#TracandSubversion1.3.1?</a>. etc.
<a class="missing wiki" href="/wiki/TracSubversion#TracandSubversion1.3.1" rel="nofollow">TracSubversion#TracandSubversion1.3.1?</a>. etc.
</p>
------------------------------
============================== WikiPageNames with fragment id (performance test)
BillOfMaterials#get_bill_of_materials_from_room_xml(fpxml=nil)
[BillOfMaterials#get_bill_of_materials_from_room_xml(fpxml=nil)]
[BillOfMaterials#get_bill_of_materials_from_room_xml(fpxml=nil) speed]
------------------------------
<p>
<a class="missing wiki" href="/wiki/BillOfMaterials#get_bill_of_materials_from_room_xml" rel="nofollow">BillOfMaterials#get_bill_of_materials_from_room_xml?</a>(fpxml=nil)
</p>
<p>
[<a class="missing wiki" href="/wiki/BillOfMaterials#get_bill_of_materials_from_room_xml" rel="nofollow">BillOfMaterials#get_bill_of_materials_from_room_xml?</a>(fpxml=nil)]
</p>
<p>
[<a class="missing wiki" href="/wiki/BillOfMaterials#get_bill_of_materials_from_room_xml" rel="nofollow">BillOfMaterials#get_bill_of_materials_from_room_xml?</a>(fpxml=nil) speed]
</p>
------------------------------
============================== WikiPageNames counter examples (paths)
/absolute/path/is/NotWiki and relative/path/is/NotWiki and ../higher/is/NotWiki
but ThisIs/SubWiki and now This/Also
and ../Relative/Camel or /Absolute/Camel as well
------------------------------
<p>
/absolute/path/is/NotWiki and relative/path/is/NotWiki and ../higher/is/NotWiki
but <a class="missing wiki" href="/wiki/ThisIs/SubWiki" rel="nofollow">ThisIs/SubWiki?</a> and now <a class="missing wiki" href="/wiki/This/Also" rel="nofollow">This/Also?</a>
and <a class="missing wiki" href="/wiki/Relative/Camel" rel="nofollow">../Relative/Camel?</a> or <a class="missing wiki" href="/wiki/Absolute/Camel" rel="nofollow">/Absolute/Camel?</a> as well
</p>
------------------------------
============================== WikiPageNames counter examples (numbers)
8FjBpOmy
anotherWikiPageName
------------------------------
<p>
8FjBpOmy
anotherWikiPageName
</p>
------------------------------
8FjBpOmy
anotherWikiPageName
============================== WikiPageNames counter examples (unicode)
Småbokstaver should not produce a link
neither should AbAbÅ nor AbAbÅÅb
------------------------------
<p>
Småbokstaver should not produce a link
neither should AbAbÅ nor AbAbÅÅb
</p>
------------------------------
Småbokstaver should not produce a link
neither should AbAbÅ nor AbAbÅÅb
============================== not a WikiPageNames at all (#9025 regression)
[ሀሁሂሃሄህሆለሉሊላሌልሎሏሐሑሒሓሔሕሖመሙሚማሜምሞሟሠሡሢሣሤሥሦረሩሪራሬርሮሯሰሱሲሳሴስሶሷሸሹሺሻሼሽሾሿቀቁቂቃቄቅቆቈቊቋቌቍቐቑቒቓቔቕቖቘቚቛቜቝበቡቢባቤብቦቧቨቩቪቫቬቭቮቯተቱቲታቴትቶቷቸቹቺቻቼችቾቿኀኁኂኃኄኅኆኈኊኋኌኍነኑኒናኔንኖኗኘኙኚኛኜኝኞኟአኡኢኣኤእኦኧከኩኪካኬክኮኰኲኳኴኵኸኹኺኻኼኽኾወዉዊዋዌውዎዐዑዒዓዔዕዖዘዙዚዛዜዝዞዟዠዡዢዣዤዥዦዧየዩዪያዬይዮደዱዲዳዴድዶዷጀጁጂጃጄጅጆጇገጉጊጋጌግጎጐጒጓጔጕጠጡጢጣጤጥጦጧጨጩጪጫጬጭጮጯጰጱጲጳጴጵጶጷጸጹጺጻጼጽጾጿፀፁፂፃፄፅፆፈፉፊፋፌፍፎፏፐፑፒፓፔፕፖፗፘፙፚ፩፪፫፬፭፮፯፰፱፲፳፴፵፶፷፸፹፺፻]------------------------------
<p>
[ሀሁሂሃሄህሆለሉሊላሌልሎሏሐሑሒሓሔሕሖመሙሚማሜምሞሟሠሡሢሣሤሥሦረሩሪራሬርሮሯሰሱሲሳሴስሶሷሸሹሺሻሼሽሾሿቀቁቂቃቄቅቆቈቊቋቌቍቐቑቒቓቔቕቖቘቚቛቜቝበቡቢባቤብቦቧቨቩቪቫቬቭቮቯተቱቲታቴትቶቷቸቹቺቻቼችቾቿኀኁኂኃኄኅኆኈኊኋኌኍነኑኒናኔንኖኗኘኙኚኛኜኝኞኟአኡኢኣኤእኦኧከኩኪካኬክኮኰኲኳኴኵኸኹኺኻኼኽኾወዉዊዋዌውዎዐዑዒዓዔዕዖዘዙዚዛዜዝዞዟዠዡዢዣዤዥዦዧየዩዪያዬይዮደዱዲዳዴድዶዷጀጁጂጃጄጅጆጇገጉጊጋጌግጎጐጒጓጔጕጠጡጢጣጤጥጦጧጨጩጪጫጬጭጮጯጰጱጲጳጴጵጶጷጸጹጺጻጼጽጾጿፀፁፂፃፄፅፆፈፉፊፋፌፍፎፏፐፑፒፓፔፕፖፗፘፙፚ፩፪፫፬፭፮፯፰፱፲፳፴፵፶፷፸፹፺፻]
</p>
------------------------------
[ሀሁሂሃሄህሆለሉሊላሌልሎሏሐሑሒሓሔሕሖመሙሚማሜምሞሟሠሡሢሣሤሥሦረሩሪራሬርሮሯሰሱሲሳሴስሶሷሸሹሺሻሼሽሾሿቀቁቂቃቄቅቆቈቊቋቌቍቐቑቒቓቔቕቖቘቚቛቜቝበቡቢባቤብቦቧቨቩቪቫቬቭቮቯተቱቲታቴትቶቷቸቹቺቻቼችቾቿኀኁኂኃኄኅኆኈኊኋኌኍነኑኒናኔንኖኗኘኙኚኛኜኝኞኟአኡኢኣኤእኦኧከኩኪካኬክኮኰኲኳኴኵኸኹኺኻኼኽኾወዉዊዋዌውዎዐዑዒዓዔዕዖዘዙዚዛዜዝዞዟዠዡዢዣዤዥዦዧየዩዪያዬይዮደዱዲዳዴድዶዷጀጁጂጃጄጅጆጇገጉጊጋጌግጎጐጒጓጔጕጠጡጢጣጤጥጦጧጨጩጪጫጬጭጮጯጰጱጲጳጴጵጶጷጸጹጺጻጼጽጾጿፀፁፂፃፄፅፆፈፉፊፋፌፍፎፏፐፑፒፓፔፕፖፗፘፙፚ፩፪፫፬፭፮፯፰፱፲፳፴፵፶፷፸፹፺፻]
============================== MoinMoin style forced links
This is a ["Wiki"] page link.
This is a ["Wiki" wiki page] link with label.
This is a ["Wiki?param=1#fragment"] page link with query and fragment.
------------------------------
<p>
This is a <a class="missing wiki" href="/wiki/Wiki" rel="nofollow">Wiki?</a> page link.
This is a <a class="missing wiki" href="/wiki/Wiki" rel="nofollow">wiki page?</a> link with label.
This is a <a class="missing wiki" href="/wiki/Wiki?param=1#fragment" rel="nofollow">Wiki?</a> page link with query and fragment.
</p>
------------------------------
============================== Wiki links with @version
wiki:page@12
WikiStart@12
WikiStart@12#heading
[WikiStart@12]
[WikiStart@12#heading]
This is a ["Wiki@12"] page link.
[wiki:WikiStart@12?format=txt v12 as text]
------------------------------
<p>
<a class="missing wiki" href="/wiki/page?version=12" rel="nofollow">wiki:page@12?</a>
<a class="wiki" href="/wiki/WikiStart?version=12">WikiStart@12</a>
<a class="wiki" href="/wiki/WikiStart?version=12#heading">WikiStart@12#heading</a>
[<a class="wiki" href="/wiki/WikiStart?version=12">WikiStart@12</a>]
[<a class="wiki" href="/wiki/WikiStart?version=12#heading">WikiStart@12#heading</a>]
This is a <a class="missing wiki" href="/wiki/Wiki?version=12" rel="nofollow">Wiki@12?</a> page link.
<a class="wiki" href="/wiki/WikiStart?version=12&format=txt">v12 as text</a>
</p>
------------------------------
============================== WikiPageName with label
See details of the [WikiPageNames wiki page name] syntax.
Here's a [BadExample\fbad] example with special whitespace.
We can also [WikiLabels '"use [quotes]"']
or [WikiLabels "'use [quotes]'"]
------------------------------
<p>
See details of the <a class="missing wiki" href="/wiki/WikiPageNames" rel="nofollow">wiki page name?</a> syntax.
Here's a <a class="missing wiki" href="/wiki/BadExample" rel="nofollow">bad?</a> example with special whitespace.
We can also <a class="missing wiki" href="/wiki/WikiLabels" rel="nofollow">"use [quotes]"?</a>
or <a class="missing wiki" href="/wiki/WikiLabels" rel="nofollow">'use [quotes]'?</a>
</p>
------------------------------
============================== WikiPageName with label should be strict...
new_channel_name [, '''integer''' handle [, '''boolean''' test]]
------------------------------
<p>
new_channel_name [, <strong>integer</strong> handle [, <strong>boolean</strong> test]]
</p>
------------------------------
============================== InterTrac for wiki
t:wiki:InterTrac
trac:wiki:InterTrac
[t:wiki:InterTrac intertrac]
[trac:wiki:InterTrac intertrac]
[trac:wiki:JonasBorgström jonas]
------------------------------
<p>
<a class="ext-link" href="http://trac.edgewall.org/intertrac/wiki%3AInterTrac" title="wiki:InterTrac in Trac's Trac"><span class="icon"></span>t:wiki:InterTrac</a>
<a class="ext-link" href="http://trac.edgewall.org/intertrac/wiki%3AInterTrac" title="wiki:InterTrac in Trac's Trac"><span class="icon"></span>trac:wiki:InterTrac</a>
<a class="ext-link" href="http://trac.edgewall.org/intertrac/wiki%3AInterTrac" title="wiki:InterTrac in Trac's Trac"><span class="icon"></span>intertrac</a>
<a class="ext-link" href="http://trac.edgewall.org/intertrac/wiki%3AInterTrac" title="wiki:InterTrac in Trac's Trac"><span class="icon"></span>intertrac</a>
<a class="ext-link" href="http://trac.edgewall.org/intertrac/wiki%3AJonasBorgstr%C3%B6m" title="wiki:JonasBorgström in Trac's Trac"><span class="icon"></span>jonas</a>
</p>
------------------------------
============================== Wiki InterTrac shorthands
t:InterTrac
trac:InterTrac
[t:InterTrac intertrac]
[trac:InterTrac intertrac]
[trac:JonasBorgström jonas]
------------------------------
<p>
<a class="ext-link" href="http://trac.edgewall.org/intertrac/InterTrac" title="InterTrac in Trac's Trac"><span class="icon"></span>t:InterTrac</a>
<a class="ext-link" href="http://trac.edgewall.org/intertrac/InterTrac" title="InterTrac in Trac's Trac"><span class="icon"></span>trac:InterTrac</a>
<a class="ext-link" href="http://trac.edgewall.org/intertrac/InterTrac" title="InterTrac in Trac's Trac"><span class="icon"></span>intertrac</a>
<a class="ext-link" href="http://trac.edgewall.org/intertrac/InterTrac" title="InterTrac in Trac's Trac"><span class="icon"></span>intertrac</a>
<a class="ext-link" href="http://trac.edgewall.org/intertrac/JonasBorgstr%C3%B6m" title="JonasBorgström in Trac's Trac"><span class="icon"></span>jonas</a>
</p>
------------------------------
============================== InterWiki links
This is the original MeatBall:InterMapTxt wiki page.
Checkout the [tsvn:http://svn.edgewall.com/repos/trac Trac Repository].
complex link complex:a:test with positional arguments
complex link complex:a (not enough arguments)
complex link complex:a:test:more (too many arguments)
in trac.ini inter:b:resource
in trac.ini over:c:something overrides wiki
NoLink:ignored
NoLink:
NoLink: ...
------------------------------
<p>
This is the original <a class="ext-link" href="http://www.usemod.com/cgi-bin/mb.pl?InterMapTxt" title="InterMapTxt in MeatBall..."><span class="icon"></span>MeatBall:InterMapTxt</a> wiki page.
Checkout the <a class="ext-link" href="tsvn:http://svn.edgewall.com/repos/trac" title="http://svn.edgewall.com/repos/trac in tsvn"><span class="icon"></span>Trac Repository</a>.
</p>
<p>
complex link <a class="ext-link" href="http://server/a/page/test?format=txt" title="resource test in a"><span class="icon"></span>complex:a:test</a> with positional arguments
complex link <a class="ext-link" href="http://server/a/page/?format=txt" title="resource in a"><span class="icon"></span>complex:a</a> (not enough arguments)
complex link <a class="ext-link" href="http://server/a/page/test:more?format=txt" title="resource test:more in a"><span class="icon"></span>complex:a:test:more</a> (too many arguments)
</p>
<p>
in trac.ini <a class="ext-link" href="http://inter/b/page/resource" title="Resource resource in b"><span class="icon"></span>inter:b:resource</a>
in trac.ini <a class="ext-link" href="http://over/c/page/something" title="c:something in over"><span class="icon"></span>over:c:something</a> overrides wiki
</p>
<p>
NoLink:ignored
<a class="missing wiki" href="/wiki/NoLink" rel="nofollow">NoLink?</a>:
<a class="missing wiki" href="/wiki/NoLink" rel="nofollow">NoLink?</a>: ...
</p>
------------------------------
============================== InterWiki links with parameters and fragment
See also MeatBall:InterMapTxt#there wiki page
and MeatBall:InterMapTxt?format=txt#there wiki page.
complex link complex:a:test?go#there with positional arguments
------------------------------
<p>
See also <a class="ext-link" href="http://www.usemod.com/cgi-bin/mb.pl?InterMapTxt#there" title="InterMapTxt in MeatBall..."><span class="icon"></span>MeatBall:InterMapTxt#there</a> wiki page
and <a class="ext-link" href="http://www.usemod.com/cgi-bin/mb.pl?InterMapTxt&format=txt#there" title="InterMapTxt in MeatBall..."><span class="icon"></span>MeatBall:InterMapTxt?format=txt#there</a> wiki page.
</p>
<p>
complex link <a class="ext-link" href="http://server/a/page/test?format=txt&go#there" title="resource test in a"><span class="icon"></span>complex:a:test?go#there</a> with positional arguments
</p>
------------------------------
============================== Regression for #9712
This is not a link: x,://localhost
------------------------------
<p>
This is not a link: x,:<em>localhost
</em></p>
------------------------------
============================== Wiki links with @version using unicode digits
WikiStart@₄₂
WikiStart@₄₂#heading
[WikiStart@₄₂]
[WikiStart@₄₂#heading]
------------------------------
<p>
<a class="wiki" href="/wiki/WikiStart">WikiStart</a>@₄₂
<a class="wiki" href="/wiki/WikiStart">WikiStart</a>@₄₂#heading
[<a class="wiki" href="/wiki/WikiStart">WikiStart</a>@₄₂]
[<a class="wiki" href="/wiki/WikiStart">WikiStart</a>@₄₂#heading]
</p>
------------------------------
""" #" Emacs likes it that way better
# Formatter test fixture: links relative to the project URL (//...), the
# base URL (/...), and the current page ('Main/Sub', created by the test
# setup), in bracketed, wiki-realm, and raw CamelCase forms.  Each case is
# "=== title" / wikitext / "---" / expected page HTML, separated by dashes.
# NOTE(review): do not edit the string body — the HTML is compared verbatim.
RELATIVE_LINKS_TESTS = u"""
============================== Relative to the project url
[//docs Documentation]
[//docs?param=1#fragment Documentation]
[//docs]
[//docs //docs]
[//docs?param=1#fragment]
[// Home]
[//]
[//?param=1#fragment]
------------------------------
<p>
<a href="/docs">Documentation</a>
<a href="/docs?param=1#fragment">Documentation</a>
<a href="/docs">docs</a>
<a href="/docs">//docs</a>
<a href="/docs?param=1#fragment">docs</a>
<a href="/">Home</a>
<a href="/">//</a>
<a href="/?param=1#fragment">//</a>
</p>
------------------------------
============================== Relative to the base url
[/newticket?priority=high#fragment bug]
[/newticket?priority=high#fragment]
[/newticket]
[/newticket /newticket]
[/ Project]
[/]
[/?param=1#fragment]
------------------------------
<p>
<a href="/newticket?priority=high#fragment">bug</a>
<a href="/newticket?priority=high#fragment">newticket</a>
<a href="/newticket">newticket</a>
<a href="/newticket">/newticket</a>
<a href="/">Project</a>
<a href="/">/</a>
<a href="/?param=1#fragment">/</a>
</p>
------------------------------
============================== Relative to the current page
[.]
[./]
[..]
[../]
[./../.]
[. this page]
[./Detail see detail]
[./Detail]
[./Detail ./Detail]
[.. see parent]
[../Other see other]
[../Other]
[../Other ../Other]
[.././../Other]
------------------------------
<p>
<a class="wiki" href="/wiki/Main/Sub">.</a>
<a class="wiki" href="/wiki/Main/Sub">./</a>
<a class="missing wiki" href="/wiki/Main" rel="nofollow">..?</a>
<a class="missing wiki" href="/wiki/Main" rel="nofollow">../?</a>
<a class="missing wiki" href="/wiki/Main" rel="nofollow">./../.?</a>
<a class="wiki" href="/wiki/Main/Sub">this page</a>
<a class="missing wiki" href="/wiki/Main/Sub/Detail" rel="nofollow">see detail?</a>
<a class="missing wiki" href="/wiki/Main/Sub/Detail" rel="nofollow">Detail?</a>
<a class="missing wiki" href="/wiki/Main/Sub/Detail" rel="nofollow">./Detail?</a>
<a class="missing wiki" href="/wiki/Main" rel="nofollow">see parent?</a>
<a class="missing wiki" href="/wiki/Main/Other" rel="nofollow">see other?</a>
<a class="missing wiki" href="/wiki/Main/Other" rel="nofollow">Other?</a>
<a class="missing wiki" href="/wiki/Main/Other" rel="nofollow">../Other?</a>
<a class="missing wiki" href="/wiki/Other" rel="nofollow">Other?</a>
</p>
------------------------------
============================== Relative to the current page, in wiki realm
[wiki:. this page]
[wiki:./Detail]
[wiki:"./Detail"]
[wiki:./Detail ./Detail]
[wiki:./Detail see detail]
[wiki:.. see parent]
[wiki:../Other see other]
[wiki:.././../Other]
["."]
[".?param=1#fragment"]
["./Detail"]
["./Detail?param=1#fragment"]
[".."]
["..?param=1#fragment"]
["../Other"]
["../Other?param=1#fragment"]
[".././../Other"]
------------------------------
<p>
<a class="wiki" href="/wiki/Main/Sub">this page</a>
<a class="missing wiki" href="/wiki/Main/Sub/Detail" rel="nofollow">Detail?</a>
<a class="missing wiki" href="/wiki/Main/Sub/Detail" rel="nofollow">Detail?</a>
<a class="missing wiki" href="/wiki/Main/Sub/Detail" rel="nofollow">./Detail?</a>
<a class="missing wiki" href="/wiki/Main/Sub/Detail" rel="nofollow">see detail?</a>
<a class="missing wiki" href="/wiki/Main" rel="nofollow">see parent?</a>
<a class="missing wiki" href="/wiki/Main/Other" rel="nofollow">see other?</a>
<a class="missing wiki" href="/wiki/Other" rel="nofollow">Other?</a>
<a class="wiki" href="/wiki/Main/Sub">.</a>
<a class="wiki" href="/wiki/Main/Sub?param=1#fragment">.</a>
<a class="missing wiki" href="/wiki/Main/Sub/Detail" rel="nofollow">Detail?</a>
<a class="missing wiki" href="/wiki/Main/Sub/Detail?param=1#fragment" rel="nofollow">Detail?</a>
<a class="missing wiki" href="/wiki/Main" rel="nofollow">..?</a>
<a class="missing wiki" href="/wiki/Main?param=1#fragment" rel="nofollow">..?</a>
<a class="missing wiki" href="/wiki/Main/Other" rel="nofollow">Other?</a>
<a class="missing wiki" href="/wiki/Main/Other?param=1#fragment" rel="nofollow">Other?</a>
<a class="missing wiki" href="/wiki/Other" rel="nofollow">Other?</a>
</p>
------------------------------
============================== Relative to the current page, as CamelCase
OnePage/SubPage
./SubPage
../SiblingPage
.././../HigherPage
/TopPage
------------------------------
<p>
<a class="missing wiki" href="/wiki/Main/OnePage/SubPage" rel="nofollow">OnePage/SubPage?</a>
<a class="missing wiki" href="/wiki/Main/Sub/SubPage" rel="nofollow">./SubPage?</a>
<a class="missing wiki" href="/wiki/Main/SiblingPage" rel="nofollow">../SiblingPage?</a>
<a class="missing wiki" href="/wiki/HigherPage" rel="nofollow">.././../HigherPage?</a>
<a class="missing wiki" href="/wiki/TopPage" rel="nofollow">/TopPage?</a>
</p>
------------------------------
============================== Relative to the current page with query strings and fragments
[#topic see topic]
[?param=1#topic see topic]
[.#topic see topic]
[.?param=1#topic see topic]
[./#topic see topic]
[./?param=1#topic see topic]
[./Detail#topic see detail]
[./Detail?param=1#topic see detail]
[./Detail?param=1#topic]
[..#topic see parent]
[..?param=1#topic see parent]
[../#topic see parent]
[../?param=1#topic see parent]
[../Other#topic see other]
[../Other?param=1#topic see other]
[../Other?param=1#topic]
[../Other/#topic see other]
[../Other/?param=1#topic see other]
------------------------------
<p>
<a class="wiki" href="/wiki/Main/Sub#topic">see topic</a>
<a class="wiki" href="/wiki/Main/Sub?param=1#topic">see topic</a>
<a class="wiki" href="/wiki/Main/Sub#topic">see topic</a>
<a class="wiki" href="/wiki/Main/Sub?param=1#topic">see topic</a>
<a class="wiki" href="/wiki/Main/Sub#topic">see topic</a>
<a class="wiki" href="/wiki/Main/Sub?param=1#topic">see topic</a>
<a class="missing wiki" href="/wiki/Main/Sub/Detail#topic" rel="nofollow">see detail?</a>
<a class="missing wiki" href="/wiki/Main/Sub/Detail?param=1#topic" rel="nofollow">see detail?</a>
<a class="missing wiki" href="/wiki/Main/Sub/Detail?param=1#topic" rel="nofollow">Detail?</a>
<a class="missing wiki" href="/wiki/Main#topic" rel="nofollow">see parent?</a>
<a class="missing wiki" href="/wiki/Main?param=1#topic" rel="nofollow">see parent?</a>
<a class="missing wiki" href="/wiki/Main#topic" rel="nofollow">see parent?</a>
<a class="missing wiki" href="/wiki/Main?param=1#topic" rel="nofollow">see parent?</a>
<a class="missing wiki" href="/wiki/Main/Other#topic" rel="nofollow">see other?</a>
<a class="missing wiki" href="/wiki/Main/Other?param=1#topic" rel="nofollow">see other?</a>
<a class="missing wiki" href="/wiki/Main/Other?param=1#topic" rel="nofollow">Other?</a>
<a class="missing wiki" href="/wiki/Main/Other#topic" rel="nofollow">see other?</a>
<a class="missing wiki" href="/wiki/Main/Other?param=1#topic" rel="nofollow">see other?</a>
</p>
------------------------------
""" # "
# Formatter test fixture: CamelCase page names rendered with their words
# split apart ("WikiPage" -> "Wiki Page") for relative, scoped, and quoted
# free links; explicit labels and non-CamelCase names must stay untouched.
# NOTE(review): do not edit the string body — the HTML is compared verbatim.
SPLIT_PAGE_NAMES_TESTS = u"""
============================== Splitting relative links
[//WikiPage]
[/WikiPage]
[./WikiPage]
[../WikiPage]
[//WikiPage?param=1#fragment]
[/WikiPage?param=1#fragment]
[./WikiPage?param=1#fragment]
[../WikiPage?param=1#fragment]
But not [./wiki_page]
And not [../WikiPage WikiPage]
------------------------------
<p>
<a href="/WikiPage">Wiki Page</a>
<a href="/WikiPage">Wiki Page</a>
<a class="missing wiki" href="/wiki/Main/Sub/WikiPage" rel="nofollow">Wiki Page?</a>
<a class="missing wiki" href="/wiki/Main/WikiPage" rel="nofollow">Wiki Page?</a>
<a href="/WikiPage?param=1#fragment">Wiki Page</a>
<a href="/WikiPage?param=1#fragment">Wiki Page</a>
<a class="missing wiki" href="/wiki/Main/Sub/WikiPage?param=1#fragment" rel="nofollow">Wiki Page?</a>
<a class="missing wiki" href="/wiki/Main/WikiPage?param=1#fragment" rel="nofollow">Wiki Page?</a>
But not <a class="missing wiki" href="/wiki/Main/Sub/wiki_page" rel="nofollow">wiki_page?</a>
And not <a class="missing wiki" href="/wiki/Main/WikiPage" rel="nofollow">WikiPage?</a>
</p>
------------------------------
============================== Splitting scoped links
[wiki:WikiPage]
[wiki:./WikiPage]
[wiki:../WikiPage]
[wiki:./.././WikiPage]
[wiki:"./.././WikiPage"]
[wiki:WikiPage?param=1#fragment]
[wiki:./WikiPage?param=1#fragment]
[wiki:../WikiPage?param=1#fragment]
But not [wiki:./wiki_page]
And not [wiki:../WikiPage WikiPage]
------------------------------
<p>
<a class="missing wiki" href="/wiki/Main/WikiPage" rel="nofollow">Wiki Page?</a>
<a class="missing wiki" href="/wiki/Main/Sub/WikiPage" rel="nofollow">Wiki Page?</a>
<a class="missing wiki" href="/wiki/Main/WikiPage" rel="nofollow">Wiki Page?</a>
<a class="missing wiki" href="/wiki/Main/WikiPage" rel="nofollow">Wiki Page?</a>
<a class="missing wiki" href="/wiki/Main/WikiPage" rel="nofollow">Wiki Page?</a>
<a class="missing wiki" href="/wiki/Main/WikiPage?param=1#fragment" rel="nofollow">Wiki Page?</a>
<a class="missing wiki" href="/wiki/Main/Sub/WikiPage?param=1#fragment" rel="nofollow">Wiki Page?</a>
<a class="missing wiki" href="/wiki/Main/WikiPage?param=1#fragment" rel="nofollow">Wiki Page?</a>
But not <a class="missing wiki" href="/wiki/Main/Sub/wiki_page" rel="nofollow">wiki_page?</a>
And not <a class="missing wiki" href="/wiki/Main/WikiPage" rel="nofollow">WikiPage?</a>
</p>
------------------------------
============================== Splitting internal free links
["WikiPage"]
["./WikiPage"]
["../WikiPage"]
["./.././WikiPage"]
["WikiPage?param=1#fragment"]
["./WikiPage?param=1#fragment"]
["../WikiPage?param=1#fragment"]
But not ["./wiki_page"]
And not ["../WikiPage" WikiPage]
------------------------------
<p>
<a class="missing wiki" href="/wiki/Main/WikiPage" rel="nofollow">Wiki Page?</a>
<a class="missing wiki" href="/wiki/Main/Sub/WikiPage" rel="nofollow">Wiki Page?</a>
<a class="missing wiki" href="/wiki/Main/WikiPage" rel="nofollow">Wiki Page?</a>
<a class="missing wiki" href="/wiki/Main/WikiPage" rel="nofollow">Wiki Page?</a>
<a class="missing wiki" href="/wiki/Main/WikiPage?param=1#fragment" rel="nofollow">Wiki Page?</a>
<a class="missing wiki" href="/wiki/Main/Sub/WikiPage?param=1#fragment" rel="nofollow">Wiki Page?</a>
<a class="missing wiki" href="/wiki/Main/WikiPage?param=1#fragment" rel="nofollow">Wiki Page?</a>
But not <a class="missing wiki" href="/wiki/Main/Sub/wiki_page" rel="nofollow">wiki_page?</a>
And not <a class="missing wiki" href="/wiki/Main/WikiPage" rel="nofollow">WikiPage?</a>
</p>
------------------------------
""" # "
# Formatter test fixture: link resolution from inside a page hierarchy
# (FirstLevel/SecondLevel/...) — nearest-ancestor lookup for bare names,
# and "/Name" forcing resolution from the wiki root.
# NOTE(review): do not edit the string body — the HTML is compared verbatim.
SCOPED_LINKS_TESTS = u"""
============================== Scoped links for hierarchical pages
ThirdLevel
[wiki:ThirdLevel]
OtherThirdLevel
[wiki:OtherThirdLevel]
SecondLevel/OtherThirdLevel
[wiki:SecondLevel/OtherThirdLevel]
SecondLevel
[wiki:SecondLevel]
FirstLevel
[wiki:FirstLevel]
TestPage
[wiki:TestPage]
MissingPage
[wiki:MissingPage]
FirstLevel/MissingPage
[wiki:FirstLevel/MissingPage]
SecondLevel/MissingPage
[wiki:SecondLevel/MissingPage]
MissingFirstLevel/MissingPage
[wiki:MissingFirstLevel/MissingPage]
["/OtherThirdLevel"]
[wiki:/OtherThirdLevel]
[wiki:/OtherThirdLevel /OtherThirdLevel]
------------------------------
<p>
<a class="wiki" href="/wiki/FirstLevel/SecondLevel/ThirdLevel">ThirdLevel</a>
<a class="wiki" href="/wiki/FirstLevel/SecondLevel/ThirdLevel">ThirdLevel</a>
<a class="wiki" href="/wiki/FirstLevel/SecondLevel/OtherThirdLevel">OtherThirdLevel</a>
<a class="wiki" href="/wiki/FirstLevel/SecondLevel/OtherThirdLevel">OtherThirdLevel</a>
<a class="wiki" href="/wiki/FirstLevel/SecondLevel/OtherThirdLevel">SecondLevel/OtherThirdLevel</a>
<a class="wiki" href="/wiki/FirstLevel/SecondLevel/OtherThirdLevel">SecondLevel/OtherThirdLevel</a>
<a class="wiki" href="/wiki/FirstLevel/SecondLevel">SecondLevel</a>
<a class="wiki" href="/wiki/FirstLevel/SecondLevel">SecondLevel</a>
<a class="wiki" href="/wiki/FirstLevel">FirstLevel</a>
<a class="wiki" href="/wiki/FirstLevel">FirstLevel</a>
<a class="wiki" href="/wiki/TestPage">TestPage</a>
<a class="wiki" href="/wiki/TestPage">TestPage</a>
<a class="missing wiki" href="/wiki/FirstLevel/SecondLevel/MissingPage" rel="nofollow">MissingPage?</a>
<a class="missing wiki" href="/wiki/FirstLevel/SecondLevel/MissingPage" rel="nofollow">MissingPage?</a>
<a class="missing wiki" href="/wiki/FirstLevel/MissingPage" rel="nofollow">FirstLevel/MissingPage?</a>
<a class="missing wiki" href="/wiki/FirstLevel/MissingPage" rel="nofollow">FirstLevel/MissingPage?</a>
<a class="missing wiki" href="/wiki/FirstLevel/SecondLevel/MissingPage" rel="nofollow">SecondLevel/MissingPage?</a>
<a class="missing wiki" href="/wiki/FirstLevel/SecondLevel/MissingPage" rel="nofollow">SecondLevel/MissingPage?</a>
<a class="missing wiki" href="/wiki/FirstLevel/SecondLevel/MissingFirstLevel/MissingPage" rel="nofollow">MissingFirstLevel/MissingPage?</a>
<a class="missing wiki" href="/wiki/FirstLevel/SecondLevel/MissingFirstLevel/MissingPage" rel="nofollow">MissingFirstLevel/MissingPage?</a>
<a class="missing wiki" href="/wiki/OtherThirdLevel" rel="nofollow">OtherThirdLevel?</a>
<a class="missing wiki" href="/wiki/OtherThirdLevel" rel="nofollow">OtherThirdLevel?</a>
<a class="missing wiki" href="/wiki/OtherThirdLevel" rel="nofollow">/OtherThirdLevel?</a>
</p>
------------------------------
""" # "
def wiki_setup(tc):
tc.env.config.set('wiki', 'render_unsafe_content', True) # for #9712
now = datetime.now(utc)
wiki0 = WikiPage(tc.env)
wiki0.name = 'Main/Sub'
wiki0.text = '--'
wiki0.save('joe', 'subpage', '::1', now)
wiki1 = WikiPage(tc.env)
wiki1.name = 'TestPage'
wiki1.text = '--'
wiki1.save('joe', 'normal WikiPageNames', '::1', now)
wiki2 = WikiPage(tc.env)
wiki2.name = 'Space 1 23'
wiki2.text = '--'
wiki2.save('joe', 'not a WikiPageNames', '::1', now)
wiki3 = WikiPage(tc.env)
wiki3.name = u"C'est l'\xe9t\xe9"
wiki3.text = '--'
wiki3.save('joe', 'unicode WikiPageNames', '::1', now)
imt = WikiPage(tc.env)
imt.name = u"InterMapTxt"
imt.text = """
This is the InterMapTxt
----
{{{
MeatBall http://www.usemod.com/cgi-bin/mb.pl? # $1 in MeatBall...
tsvn tsvn:
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | true |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/wiki/tests/macros.py | trac/trac/wiki/tests/macros.py | # -*- coding: utf-8 -*-
from datetime import datetime
import unittest
from trac.config import Option
from trac.test import locale_en
from trac.util.datefmt import format_date, utc
from trac.wiki.model import WikiPage
from trac.wiki.tests import formatter
# == [[Image]]
# Note: using `« test »` string in the following tests for checking
# unicode robustness and whitespace support (first space is
# normal ASCII SPACE, second is Unicode NO-BREAK SPACE).
IMAGE_MACRO_TEST_CASES = u"""
============================== source: Image, no other arguments
[[Image(source:« test ».png)]]
------------------------------
<p>
<a style="padding:0; border:none" href="/browser/%C2%AB%20test%C2%A0%C2%BB.png"><img src="/browser/%C2%AB%20test%C2%A0%C2%BB.png?format=raw" alt="source:« test ».png" title="source:« test ».png" /></a>
</p>
------------------------------
<a style="padding:0; border:none" href="/browser/%C2%AB%20test%C2%A0%C2%BB.png"><img src="/browser/%C2%AB%20test%C2%A0%C2%BB.png?format=raw" alt="source:« test ».png" title="source:« test ».png" /></a>
============================== intertrac:source: Image, no other arguments
[[Image(trac:source:/trunk/doc/images/bkgnd_pattern_« test ».png)]]
------------------------------
<p>
<a style="padding:0; border:none" href="http://trac.edgewall.org/intertrac/source%3A/trunk/doc/images/bkgnd_pattern_%C2%AB%20test%C2%A0%C2%BB.png"><img src="http://trac.edgewall.org/intertrac/source%3A/trunk/doc/images/bkgnd_pattern_%C2%AB%20test%C2%A0%C2%BB.png%3Fformat%3Draw" alt="source:/trunk/doc/images/bkgnd_pattern_« test ».png in Trac's Trac" title="source:/trunk/doc/images/bkgnd_pattern_« test ».png in Trac's Trac" /></a>
</p>
------------------------------
<a style="padding:0; border:none" href="http://trac.edgewall.org/intertrac/source%3A/trunk/doc/images/bkgnd_pattern_%C2%AB%20test%C2%A0%C2%BB.png"><img src="http://trac.edgewall.org/intertrac/source%3A/trunk/doc/images/bkgnd_pattern_%C2%AB%20test%C2%A0%C2%BB.png%3Fformat%3Draw" alt="source:/trunk/doc/images/bkgnd_pattern_« test ».png in Trac's Trac" title="source:/trunk/doc/images/bkgnd_pattern_« test ».png in Trac's Trac" /></a>
============================== source: Image, nolink
[[Image(source:« test », nolink)]]
------------------------------
<p>
<img src="/browser/%C2%AB%20test%C2%A0%C2%BB?format=raw" alt="source:« test »" title="source:« test »" />
</p>
------------------------------
<img src="/browser/%C2%AB%20test%C2%A0%C2%BB?format=raw" alt="source:« test »" title="source:« test »" />
============================== source: Image, normal args
[[Image(source:« test », align=left, title=Test)]]
------------------------------
<p>
<a style="padding:0; border:none" href="/browser/%C2%AB%20test%C2%A0%C2%BB"><img src="/browser/%C2%AB%20test%C2%A0%C2%BB?format=raw" alt="source:« test »" style="float:left" title="Test" /></a>
</p>
------------------------------
<a style="padding:0; border:none" href="/browser/%C2%AB%20test%C2%A0%C2%BB"><img src="/browser/%C2%AB%20test%C2%A0%C2%BB?format=raw" alt="source:« test »" style="float:left" title="Test" /></a>
============================== source: Image, size arg
[[Image(source:« test », 30%)]]
------------------------------
<p>
<a style="padding:0; border:none" href="/browser/%C2%AB%20test%C2%A0%C2%BB"><img width="30%" alt="source:« test »" title="source:« test »" src="/browser/%C2%AB%20test%C2%A0%C2%BB?format=raw" /></a>
</p>
------------------------------
============================== source: Image, keyword alignment
[[Image(source:« test », right)]]
------------------------------
<p>
<a style="padding:0; border:none" href="/browser/%C2%AB%20test%C2%A0%C2%BB"><img src="/browser/%C2%AB%20test%C2%A0%C2%BB?format=raw" alt="source:« test »" style="float:right" title="source:« test »" /></a>
</p>
------------------------------
============================== http: Image, nolink
[[Image(http://www.edgewall.com/gfx/shredder_« test ».png, nolink)]]
------------------------------
<p>
<img src="http://www.edgewall.com/gfx/shredder_« test ».png" alt="http://www.edgewall.com/gfx/shredder_« test ».png" title="http://www.edgewall.com/gfx/shredder_« test ».png" />
</p>
------------------------------
============================== http: Image, absolute, many ':'
[[Image(http://chart.apis.google.com:80/chart?cht=p3&chd=s:hW&chs=250x100&chl=Héllo|Wôrld, title=Google & Charting, link=)]]
------------------------------
<p>
<img src="http://chart.apis.google.com:80/chart?cht=p3&chd=s:hW&chs=250x100&chl=Héllo|Wôrld" alt="http://chart.apis.google.com:80/chart" title="Google & Charting" />
</p>
------------------------------
============================== // Image, server-relative
[[Image(//browser/« test »?format=raw, link=)]]
------------------------------
<p>
<img src="/browser/« test »?format=raw" alt="/browser/« test »" title="/browser/« test »" />
</p>
------------------------------
============================== / Image, project-relative, link to WikiStart
[[Image(/browser/« test »?format=raw, link=wiki:WikiStart)]]
------------------------------
<p>
<a style="padding:0; border:none" href="/wiki/WikiStart"><img src="/browser/%C2%AB%20test%C2%A0%C2%BB?format=raw" alt="/browser/« test »" title="/browser/« test »" /></a>
</p>
------------------------------
<a style="padding:0; border:none" href="/wiki/WikiStart"><img src="/browser/%C2%AB%20test%C2%A0%C2%BB?format=raw" alt="/browser/« test »" title="/browser/« test »" /></a>
============================== Strip unicode white-spaces and ZWSPs (#10668)
[[Image( source:« test ».png , nolink)]]
------------------------------
<p>
<img src="/browser/%C2%AB%20test%C2%A0%C2%BB.png?format=raw" alt="source:« test ».png" title="source:« test ».png" />
</p>
------------------------------
<img src="/browser/%C2%AB%20test%C2%A0%C2%BB.png?format=raw" alt="source:« test ».png" title="source:« test ».png" />
------------------------------
"""
# Note: in the <img> src attribute above, the Unicode characters
# within the URI sometimes come out as %-encoded, sometimes raw
# (server-relative case). Both forms are valid (at least
# according to the W3C XHTML validator).
# == [[TitleIndex]]
def add_pages(tc, names):
    """Create one wiki page per entry of *names* in the test environment.

    Every page gets the same placeholder text ('--') and is saved once
    by author 'joe' with a per-page comment, all sharing one timestamp.
    """
    timestamp = datetime.now(utc)
    for name in names:
        page = WikiPage(tc.env)
        page.name = name
        page.text = '--'
        page.save('joe', 'the page ' + name, '::1', timestamp)
def titleindex_teardown(tc):
    # Reset the stub environment's database so pages created for one
    # TitleIndex test do not leak into the following tests.
    tc.env.reset_db()
# [[TitleIndex]] fixture: default and compact formats against an
# environment containing only the stock WikiStart page.  The text after
# the second dashed separator is the expected "oneliner" rendering.
# NOTE(review): do not edit the string body — the HTML is compared verbatim.
TITLEINDEX1_MACRO_TEST_CASES = u"""
============================== TitleIndex, default format
[[TitleIndex()]]
------------------------------
<p>
</p><div class="titleindex"><ul><li><a href="/wiki/WikiStart">WikiStart</a></li></ul></div><p>
</p>
------------------------------
[[TitleIndex]]
============================== TitleIndex, compact format
[[TitleIndex(format=compact)]]
------------------------------
<p>
<a href="/wiki/WikiStart">WikiStart</a>
</p>
------------------------------
[[TitleIndex(...)]]
"""
# [[TitleIndex]] fixture: default/compact formats, with and without a
# name prefix, against WikiStart plus the extra 'WikiEnd' page created
# by titleindex2_setup; also exercises the 'hideprefix' option.
# NOTE(review): do not edit the string body — the HTML is compared verbatim.
TITLEINDEX2_MACRO_TEST_CASES = u"""
============================== TitleIndex, default format
[[TitleIndex()]]
------------------------------
<p>
</p><div class="titleindex"><ul><li><a href="/wiki/WikiEnd">WikiEnd</a></li><li><a href="/wiki/WikiStart">WikiStart</a></li></ul></div><p>
</p>
------------------------------
[[TitleIndex]]
============================== TitleIndex, compact format
[[TitleIndex(format=compact)]]
------------------------------
<p>
<a href="/wiki/WikiEnd">WikiEnd</a>, <a href="/wiki/WikiStart">WikiStart</a>
</p>
------------------------------
[[TitleIndex(...)]]
============================== TitleIndex, default format with prefix
[[TitleIndex(Wiki)]]
------------------------------
<p>
</p><div class="titleindex"><ul><li><a href="/wiki/WikiEnd">WikiEnd</a></li><li><a href="/wiki/WikiStart">WikiStart</a></li></ul></div><p>
</p>
------------------------------
[[TitleIndex(...)]]
============================== TitleIndex, compact format with prefix
[[TitleIndex(Wiki,format=compact)]]
------------------------------
<p>
<a href="/wiki/WikiEnd">WikiEnd</a>, <a href="/wiki/WikiStart">WikiStart</a>
</p>
------------------------------
[[TitleIndex(...)]]
============================== TitleIndex, default format with prefix hidden
[[TitleIndex(Wiki,hideprefix)]]
------------------------------
<p>
</p><div class="titleindex"><ul><li><a href="/wiki/WikiEnd">End</a></li><li><a href="/wiki/WikiStart">Start</a></li></ul></div><p>
</p>
------------------------------
[[TitleIndex(...)]]
============================== TitleIndex, compact format with prefix hidden
[[TitleIndex(Wiki,hideprefix,format=compact)]]
------------------------------
<p>
<a href="/wiki/WikiEnd">End</a>, <a href="/wiki/WikiStart">Start</a>
</p>
------------------------------
[[TitleIndex(...)]]
"""
def titleindex2_setup(tc):
    """Add the extra 'WikiEnd' page required by the TITLEINDEX2 cases."""
    extra_pages = ['WikiEnd']
    add_pages(tc, extra_pages)
# [[TitleIndex]] fixture: 'group' and 'hierarchy' formats over the
# WikiStart/* and WikiEnd/* subtrees created by titleindex3_setup,
# with and without the common prefix hidden.
# NOTE(review): do not edit the string body — the HTML is compared verbatim.
TITLEINDEX3_MACRO_TEST_CASES = u"""
============================== TitleIndex, group format
[[TitleIndex(Wiki,format=group)]]
------------------------------
<p>
</p><div class="titleindex"><ul><li><strong>Wiki</strong><ul><li><strong>End</strong><ul><li><a href="/wiki/WikiEnd/First">WikiEnd/First</a></li><li><a href="/wiki/WikiEnd/Second">WikiEnd/Second</a></li></ul></li><li><strong>Start</strong><ul><li><a href="/wiki/WikiStart">WikiStart</a></li><li><a href="/wiki/WikiStart/First">WikiStart/First</a></li><li><a href="/wiki/WikiStart/Second">WikiStart/Second</a></li><li><a href="/wiki/WikiStart/Third">WikiStart/Third</a></li></ul></li></ul></li></ul></div><p>
</p>
------------------------------
============================== TitleIndex, hierarchy format
[[TitleIndex(WikiStart/, format=hierarchy)]]
------------------------------
<p>
</p><div class="titleindex"><ul><li>WikiStart<ul><li><a href="/wiki/WikiStart/First">First</a></li><li><a href="/wiki/WikiStart/Second">Second</a></li><li><a href="/wiki/WikiStart/Third">Third</a></li></ul></li></ul></div><p>
</p>
------------------------------
============================== TitleIndex, group format, prefix hidden
[[TitleIndex(Wiki,hideprefix,format=group)]]
------------------------------
<p>
</p><div class="titleindex"><ul><li><strong>End</strong><ul><li><a href="/wiki/WikiEnd/First">End/First</a></li><li><a href="/wiki/WikiEnd/Second">End/Second</a></li></ul></li><li><strong>Start</strong><ul><li><a href="/wiki/WikiStart">Start</a></li><li><a href="/wiki/WikiStart/First">Start/First</a></li><li><a href="/wiki/WikiStart/Second">Start/Second</a></li><li><a href="/wiki/WikiStart/Third">Start/Third</a></li></ul></li></ul></div><p>
</p>
------------------------------
============================== TitleIndex, hierarchy format, prefix hidden
[[TitleIndex(WikiStart/,hideprefix,format=hierarchy)]]
------------------------------
<p>
</p><div class="titleindex"><ul><li><a href="/wiki/WikiStart/First">First</a></li><li><a href="/wiki/WikiStart/Second">Second</a></li><li><a href="/wiki/WikiStart/Third">Third</a></li></ul></div><p>
</p>
------------------------------
"""
def titleindex3_setup(tc):
    """Create the wiki page hierarchy needed by the TITLEINDEX3 tests."""
    names = [
        'WikiStart/First',
        'WikiStart/Second',
        'WikiStart/Third',
        'WikiEnd/First',
        'WikiEnd/Second',
    ]
    add_pages(tc, names)
TITLEINDEX4_MACRO_TEST_CASES = u"""
============================== TitleIndex group and page with numbers (#7919)
[[TitleIndex(format=group)]]
------------------------------
<p>
</p><div class="titleindex"><ul><li><strong>0.11</strong><ul><li><strong>Group</strong><ul><li><a href="/wiki/0.11/GroupOne">0.11/GroupOne</a></li><li><a href="/wiki/0.11/GroupTwo">0.11/GroupTwo</a></li></ul></li><li><a href="/wiki/0.11/Test">0.11/Test</a></li></ul></li><li><strong>Test</strong><ul><li><strong>0.11</strong><ul><li><a href="/wiki/Test0.11/Abc">Test0.11/Abc</a></li><li><a href="/wiki/Test0.11Abc">Test0.11Abc</a></li></ul></li><li><strong>0.12</strong><ul><li><a href="/wiki/Test0.12Def">Test0.12Def</a></li><li><a href="/wiki/Test0.12Ijk">Test0.12Ijk</a></li></ul></li><li><strong>0.13</strong><ul><li><a href="/wiki/Test0.13alpha">Test0.13alpha</a></li><li><a href="/wiki/Test0.13beta">Test0.13beta</a></li></ul></li><li><a href="/wiki/Test0.131">Test0.131</a></li><li><a href="/wiki/Test2">Test2</a></li><li><a href="/wiki/TestTest">TestTest</a></li><li><a href="/wiki/TestThing">TestThing</a></li></ul></li><li><a href="/wiki/WikiStart">WikiStart</a></li></ul></div><p>
</p>
------------------------------
============================== TitleIndex, compact format with prefix hidden, including Test0.13*
[[TitleIndex(Test,format=compact,include=*0.13*)]]
------------------------------
<p>
<a href="/wiki/Test0.131">Test0.131</a>, <a href="/wiki/Test0.13alpha">Test0.13alpha</a>, <a href="/wiki/Test0.13beta">Test0.13beta</a>
</p>
------------------------------
============================== TitleIndex, compact format with prefix hidden, including Test0.13* but excluding Test0.131
[[TitleIndex(Test,format=compact,include=*0.13*,exclude=*1)]]
------------------------------
<p>
<a href="/wiki/Test0.13alpha">Test0.13alpha</a>, <a href="/wiki/Test0.13beta">Test0.13beta</a>
</p>
------------------------------
============================== TitleIndex, compact format, excluding various topics
[[TitleIndex(Test,format=compact,exclude=Test0.13*:*0.11*:Test2:Test*i*)]]
------------------------------
<p>
<a href="/wiki/Test0.12Def">Test0.12Def</a>, <a href="/wiki/Test0.12Ijk">Test0.12Ijk</a>, <a href="/wiki/TestTest">TestTest</a>
</p>
------------------------------
============================== TitleIndex, compact format, including and excluding various topics
[[TitleIndex(format=compact,include=*Group*:test2,exclude=*One)]]
------------------------------
<p>
<a href="/wiki/0.11/GroupTwo">0.11/GroupTwo</a>
</p>
------------------------------
"""
def titleindex4_setup(tc):
    """Create pages mixing words and version-like numbers (see #7919)."""
    names = [
        'TestTest',
        'TestThing',
        'Test2',
        'Test0.11Abc',
        'Test0.11/Abc',
        'Test0.12Def',
        'Test0.12Ijk',
        'Test0.13alpha',
        'Test0.13beta',
        'Test0.131',
        '0.11/Test',
        '0.11/GroupOne',
        '0.11/GroupTwo',
    ]
    add_pages(tc, names)
TITLEINDEX5_MACRO_TEST_CASES = u"""
============================== TitleIndex, hierarchy format with complex hierarchy
[[TitleIndex(format=hierarchy)]]
------------------------------
<p>
</p><div class="titleindex"><ul><li><a href="/wiki/TracDev">TracDev</a><ul><li><a href="/wiki/TracDev/ApiChanges">ApiChanges</a><ul><li><a href="/wiki/TracDev/ApiChanges/0.10">0.10</a></li><li><a href="/wiki/TracDev/ApiChanges/0.11">0.11</a></li><li><a href="/wiki/TracDev/ApiChanges/0.12">0.12</a><ul><li>Missing<ul><li><a href="/wiki/TracDev/ApiChanges/0.12/Missing/Exists">Exists</a></li></ul></li></ul></li></ul></li></ul></li><li><a href="/wiki/WikiStart">WikiStart</a></li></ul></div><p>
</p>
------------------------------
============================== TitleIndex, hierarchy format with complex hierarchy (and min=5)
[[TitleIndex(format=hierarchy,min=5)]]
------------------------------
<p>
</p><div class="titleindex"><ul><li><a href="/wiki/TracDev">TracDev</a><ul><li><a href="/wiki/TracDev/ApiChanges">ApiChanges</a></li><li><a href="/wiki/TracDev/ApiChanges/0.10">ApiChanges/0.10</a></li><li><a href="/wiki/TracDev/ApiChanges/0.11">ApiChanges/0.11</a></li><li><a href="/wiki/TracDev/ApiChanges/0.12">ApiChanges/0.12</a></li><li><a href="/wiki/TracDev/ApiChanges/0.12/Missing/Exists">ApiChanges/0.12/Missing/Exists</a></li></ul></li><li><a href="/wiki/WikiStart">WikiStart</a></li></ul></div><p>
</p>
------------------------------
============================== TitleIndex, group format with complex hierarchy
[[TitleIndex(format=group)]]
------------------------------
<p>
</p><div class="titleindex"><ul><li><strong>TracDev</strong><ul><li><a href="/wiki/TracDev">TracDev</a></li><li><strong>ApiChanges</strong><ul><li><a href="/wiki/TracDev/ApiChanges">TracDev/ApiChanges</a></li><li><a href="/wiki/TracDev/ApiChanges/0.10">TracDev/ApiChanges/0.10</a></li><li><a href="/wiki/TracDev/ApiChanges/0.11">TracDev/ApiChanges/0.11</a></li><li><strong>0.12</strong><ul><li><a href="/wiki/TracDev/ApiChanges/0.12">TracDev/ApiChanges/0.12</a></li><li><a href="/wiki/TracDev/ApiChanges/0.12/Missing/Exists">TracDev/ApiChanges/0.12/Missing/Exists</a></li></ul></li></ul></li></ul></li><li><a href="/wiki/WikiStart">WikiStart</a></li></ul></div><p>
</p>
------------------------------
"""
def titleindex5_setup(tc):
    """Create a deep page hierarchy (with a gap at .../0.12/Missing)."""
    names = [
        'TracDev',
        'TracDev/ApiChanges',
        'TracDev/ApiChanges/0.10',
        'TracDev/ApiChanges/0.11',
        'TracDev/ApiChanges/0.12',
        'TracDev/ApiChanges/0.12/Missing/Exists',
    ]
    add_pages(tc, names)
# NOTE: the opening delimiter used to be u"""" (four quotes), which made the
# fixture string begin with a stray '"' character.  The test-case splitter
# discards everything before the first ==== delimiter line, so it was
# harmless, but it was still a typo.
RECENTCHANGES_MACRO_TEST_CASES = u"""
============================== RecentChanges, group option
[[RecentChanges()]]
[[RecentChanges(group=date)]]
[[RecentChanges(group=none)]]
[[RecentChanges(,2,group=none)]]
[[RecentChanges(Wiki,group=none)]]
[[RecentChanges(Wiki,1,group=none)]]
------------------------------
<p>
</p><div><h3>%(date)s</h3><ul><li><a href="/wiki/WikiEnd">WikiEnd</a>
</li><li><a href="/wiki/WikiMid">WikiMid</a>
</li><li><a href="/wiki/WikiStart">WikiStart</a>
</li></ul></div><p>
</p><div><h3>%(date)s</h3><ul><li><a href="/wiki/WikiEnd">WikiEnd</a>
</li><li><a href="/wiki/WikiMid">WikiMid</a>
</li><li><a href="/wiki/WikiStart">WikiStart</a>
</li></ul></div><p>
</p><div><ul><li><a href="/wiki/WikiEnd">WikiEnd</a>
</li><li><a href="/wiki/WikiMid">WikiMid</a>
</li><li><a href="/wiki/WikiStart">WikiStart</a>
</li></ul></div><p>
</p><div><ul><li><a href="/wiki/WikiEnd">WikiEnd</a>
</li><li><a href="/wiki/WikiMid">WikiMid</a>
</li></ul></div><p>
</p><div><ul><li><a href="/wiki/WikiEnd">WikiEnd</a>
</li><li><a href="/wiki/WikiMid">WikiMid</a>
</li><li><a href="/wiki/WikiStart">WikiStart</a>
</li></ul></div><p>
</p><div><ul><li><a href="/wiki/WikiEnd">WikiEnd</a>
</li></ul></div><p>
</p>
------------------------------
"""
def recentchanges_setup(tc):
    """Create the pages for the RecentChanges tests and substitute today's
    date into the expected output.
    """
    # Local helper shadowing the module-level add_pages: these pages need a
    # save comment that mentions the page name.
    def create_page(name):
        page = WikiPage(tc.env)
        page.name = name
        page.text = '--'
        page.save('joe', 'the page ' + name, '::1', datetime.now(utc))
    for name in ('WikiMid', 'WikiEnd'):
        create_page(name)
    tc.correct = tc.correct % {
        'date': format_date(tzinfo=utc, locale=locale_en)}
def recentchanges_teardown(tc):
    # Drop the pages created by recentchanges_setup by resetting the db.
    tc.env.reset_db()
TRACINI_MACRO_TEST_CASES = u"""\
============================== TracIni, option with empty doc (#10940)
[[TracIni(section-42)]]
------------------------------
<p>
</p><div class="tracini">\
<h3 id="section-42-section"><code>[section-42]</code></h3>\
<table class="wiki"><tbody>\
<tr><td><tt>option1</tt></td><td></td><td class="default"><code>value</code></td></tr>\
<tr><td><tt>option2</tt></td><td>blah</td><td class="default"><code>value</code></td></tr>\
</tbody></table>\
</div><p>
</p>
------------------------------
"""
def tracini_setup(tc):
    """Register two sample options for the TracIni macro tests.

    The original option registry is stashed on the test case so that
    tracini_teardown can restore it afterwards.
    """
    tc._orig_registry = Option.registry
    class Foo(object):
        # Declaring the options as class attributes is enough to get them
        # into Option.registry; the class itself is never used.
        option_a1 = Option('section-42', 'option1', 'value', doc='')
        option_a2 = Option('section-42', 'option2', 'value', doc='blah')
def tracini_teardown(tc):
    # Restore the option registry saved in tracini_setup.
    Option.registry = tc._orig_registry
def suite():
    """Assemble the macro test suites in their original order."""
    suite = unittest.TestSuite()
    # (test-case data, setup callback, teardown callback)
    specs = [
        (IMAGE_MACRO_TEST_CASES, None, None),
        (TITLEINDEX1_MACRO_TEST_CASES, None, None),
        (TITLEINDEX2_MACRO_TEST_CASES, titleindex2_setup,
         titleindex_teardown),
        (TITLEINDEX3_MACRO_TEST_CASES, titleindex3_setup,
         titleindex_teardown),
        (TITLEINDEX4_MACRO_TEST_CASES, titleindex4_setup,
         titleindex_teardown),
        (TITLEINDEX5_MACRO_TEST_CASES, titleindex5_setup,
         titleindex_teardown),
        (RECENTCHANGES_MACRO_TEST_CASES, recentchanges_setup,
         recentchanges_teardown),
        (TRACINI_MACRO_TEST_CASES, tracini_setup, tracini_teardown),
    ]
    for data, setup, teardown in specs:
        suite.addTest(formatter.suite(data, file=__file__, setup=setup,
                                      teardown=teardown))
    return suite
if __name__ == '__main__':
unittest.main(defaultTest='suite')
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/wiki/tests/functional.py | trac/trac/wiki/tests/functional.py | #!/usr/bin/python
from trac.tests.functional import *
from trac.mimeview.rst import has_docutils
from trac.util import get_pkginfo
class TestWiki(FunctionalTwillTestCaseSetup):
    def runTest(self):
        """Create a wiki page and attach a file"""
        # TODO: this should be split into multiple tests
        name = random_unique_camel()
        self._tester.create_wiki_page(name)
        self._tester.attach_file_to_wiki(name)
class TestWikiRename(FunctionalTwillTestCaseSetup):
    def runTest(self):
        """Test for simple wiki rename"""
        # Create a page with an attachment; both must follow the rename.
        pagename = random_unique_camel()
        self._tester.create_wiki_page(pagename)
        attachment = self._tester.attach_file_to_wiki(pagename)
        base_url = self._tester.url
        page_url = base_url + "/wiki/" + pagename
        # Opens the rename form via the page's "Rename page" action.
        def click_rename():
            tc.formvalue('rename', 'action', 'rename')
            tc.submit()
            tc.url(page_url + r'\?action=rename')
            tc.find("New name:")
        tc.go(page_url)
        tc.find("Rename page")
        click_rename()
        # attempt to give an empty new name
        tc.formvalue('rename-form', 'new_name', '')
        tc.submit('submit')
        tc.url(page_url)
        tc.find("A new name is mandatory for a rename")
        # attempt to rename the page to an invalid page name
        tc.formvalue('rename-form', 'new_name', '../WikiStart')
        tc.submit('submit')
        tc.url(page_url)
        tc.find("The new name is invalid")
        # attempt to rename the page to the current page name
        tc.formvalue('rename-form', 'new_name', pagename)
        tc.submit('submit')
        tc.url(page_url)
        tc.find("The new name must be different from the old name")
        # attempt to rename the page to an existing page name
        tc.formvalue('rename-form', 'new_name', 'WikiStart')
        tc.submit('submit')
        tc.url(page_url)
        tc.find("The page WikiStart already exists")
        # correct rename to new page name (old page replaced by a redirection)
        tc.go(page_url)
        click_rename()
        newpagename = pagename + 'Renamed'
        tc.formvalue('rename-form', 'new_name', newpagename)
        tc.formvalue('rename-form', 'redirect', True)
        tc.submit('submit')
        # check redirection page
        tc.url(page_url)
        tc.find("See.*/wiki/" + newpagename)
        # check whether attachment exists on the new page but not on old page
        tc.go(base_url + '/attachment/wiki/' + newpagename + '/' + attachment)
        tc.notfind("Error: Invalid Attachment")
        tc.go(base_url + '/attachment/wiki/' + pagename + '/' + attachment)
        tc.find("Error: Invalid Attachment")
        # rename again to another new page name (this time, no redirection)
        tc.go(page_url)
        click_rename()
        newpagename = pagename + 'RenamedAgain'
        tc.formvalue('rename-form', 'new_name', newpagename)
        tc.formvalue('rename-form', 'redirect', False)
        tc.submit('submit')
        tc.url(base_url + "/wiki/" + newpagename)
        # this time, the original page is gone
        tc.go(page_url)
        tc.url(page_url)
        tc.find("The page %s does not exist" % pagename)
class RegressionTestTicket4812(FunctionalTwillTestCaseSetup):
    def runTest(self):
        """Test for regression of http://trac.edgewall.org/ticket/4812"""
        # A two-level page name: attachments on sub-pages triggered the
        # original bug.
        parent, child = random_unique_camel(), random_unique_camel()
        pagename = parent + '/' + child
        self._tester.create_wiki_page(pagename)
        self._tester.attach_file_to_wiki(pagename)
        tc.notfind('does not exist')
class ReStructuredTextWikiTest(FunctionalTwillTestCaseSetup):
    def runTest(self):
        """Render reStructured text using a wikiprocessor"""
        pagename = random_unique_camel()
        # The page body delegates rendering to the rst processor; the
        # `trac::` directive should turn into a link to WikiStart.
        self._tester.create_wiki_page(pagename, content="""
{{{
#!rst
Hello
=====
.. trac:: wiki:WikiStart Some Link
}}}
""")
        self._tester.go_to_wiki(pagename)
        # The link label is rendered, the raw TracLink text is not.
        tc.find("Some Link")
        tc.find(r'<h1[^>]*>Hello')
        tc.notfind("wiki:WikiStart")
        tc.follow("Some Link")
        tc.url(self._tester.url + "/wiki/WikiStart")
class ReStructuredTextCodeBlockTest(FunctionalTwillTestCaseSetup):
    def runTest(self):
        """Render reStructured code block"""
        pagename = random_unique_camel()
        self._tester.create_wiki_page(pagename, content="""
{{{
#!rst
.. code-block:: python
print "123"
}}}
""")
        self._tester.go_to_wiki(pagename)
        # The directive itself must not leak into the rendered page,
        # while the code it wraps does show up.
        tc.notfind("code-block")
        tc.find('print')
        tc.find('"123"')
class RegressionTestTicket10274(FunctionalTwillTestCaseSetup):
    def runTest(self):
        """Test for regression of http://trac.edgewall.org/ticket/10274"""
        # Every path containing a '.' or '..' segment must be rejected
        # with the matching error message.
        for path in ('WikiStart/..', '../WikiStart', 'WikiStart/./SubPage'):
            self._tester.go_to_wiki(path)
            tc.find("Invalid Wiki page name '%s'" % path)
class RegressionTestTicket10850(FunctionalTwillTestCaseSetup):
    def runTest(self):
        """Test for regression of http://trac.edgewall.org/ticket/10850"""
        pagename = random_unique_camel()
        self._tester.create_wiki_page(pagename)
        # (tempfilename handed to the tester, filename expected in the
        #  attachment URL once path components are stripped)
        cases = [
            # colon characters
            ('2012-09-11_15:36:40-test.tbz2',
             '2012-09-11_15:36:40-test.tbz2'),
            # backslash characters
            (r'/tmp/back\slash.txt', r'back\slash.txt'),
            # Windows full path
            (r'z:\tmp\windows:path.txt', r'windows:path.txt'),
            # Windows share folder path
            (r'\\server\share\file:name.txt', r'file:name.txt'),
        ]
        for tempfilename, filename in cases:
            self._tester.attach_file_to_wiki(pagename,
                                             tempfilename=tempfilename)
            tc.go(self._tester.url + '/attachment/wiki/' + pagename +
                  '/' + filename)
            tc.notfind('Error: Invalid Attachment')
def functionalSuite(suite=None):
    """Append the wiki functional tests to *suite* (or to a fresh base
    functional suite) and return it.
    """
    if not suite:
        import trac.tests.functional.testcases
        suite = trac.tests.functional.testcases.functionalSuite()
    suite.addTest(TestWiki())
    suite.addTest(TestWikiRename())
    suite.addTest(RegressionTestTicket4812())
    suite.addTest(RegressionTestTicket10274())
    suite.addTest(RegressionTestTicket10850())
    # The reST tests need a docutils with usable setuptools metadata.
    if has_docutils:
        import docutils
        if get_pkginfo(docutils):
            suite.addTest(ReStructuredTextWikiTest())
            suite.addTest(ReStructuredTextCodeBlockTest())
        else:
            print "SKIP: reST wiki tests (docutils has no setuptools metadata)"
    else:
        print "SKIP: reST wiki tests (no docutils)"
    return suite
if __name__ == '__main__':
unittest.main(defaultTest='functionalSuite')
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/wiki/tests/model.py | trac/trac/wiki/tests/model.py | # -*- coding: utf-8 -*-
from __future__ import with_statement
from datetime import datetime
import os.path
import shutil
from StringIO import StringIO
import tempfile
import unittest
from trac.attachment import Attachment
from trac.core import *
from trac.test import EnvironmentStub
from trac.tests.resource import TestResourceChangeListener
from trac.util.datefmt import utc, to_utimestamp
from trac.wiki import WikiPage, IWikiChangeListener
class TestWikiChangeListener(Component):
    """Records every IWikiChangeListener notification so the test cases
    can assert on what was reported.
    """
    implements(IWikiChangeListener)
    def __init__(self):
        # One list per event type, appended to by the callbacks below.
        self.added = []
        self.changed = []
        self.deleted = []
        self.deleted_version = []
        self.renamed = []
    def wiki_page_added(self, page):
        self.added.append(page)
    def wiki_page_changed(self, page, version, t, comment, author, ipnr):
        self.changed.append((page, version, t, comment, author, ipnr))
    def wiki_page_deleted(self, page):
        self.deleted.append(page)
    def wiki_page_version_deleted(self, page):
        self.deleted_version.append(page)
    def wiki_page_renamed(self, page, old_name):
        self.renamed.append((page, old_name))
class WikiPageTestCase(unittest.TestCase):
    """Unit tests for the WikiPage model: creation, retrieval, update,
    deletion, rename and page-name validation.
    """
    def setUp(self):
        self.env = EnvironmentStub()
        # Use a freshly created unique directory.  The previous fixed
        # path (gettempdir() + 'trac-tempenv' with os.mkdir) failed with
        # OSError whenever a stale directory was left behind by an
        # aborted run, and collided between concurrent test runs.
        self.env.path = tempfile.mkdtemp(prefix='trac-tempenv-')
    def tearDown(self):
        shutil.rmtree(self.env.path)
        self.env.reset_db()
    def test_new_page(self):
        # A page that was never saved exposes empty default values.
        page = WikiPage(self.env)
        self.assertEqual(False, page.exists)
        self.assertEqual(None, page.name)
        self.assertEqual(0, page.version)
        self.assertEqual('', page.text)
        self.assertEqual(0, page.readonly)
        self.assertEqual('', page.author)
        self.assertEqual('', page.comment)
        self.assertEqual(None, page.time)
    def test_existing_page(self):
        t = datetime(2001, 1, 1, 1, 1, 1, 0, utc)
        self.env.db_transaction(
            "INSERT INTO wiki VALUES(%s,%s,%s,%s,%s,%s,%s,%s)",
            ('TestPage', 1, to_utimestamp(t), 'joe', '::1', 'Bla bla',
             'Testing', 0))
        page = WikiPage(self.env, 'TestPage')
        self.assertEqual(True, page.exists)
        self.assertEqual('TestPage', page.name)
        self.assertEqual(1, page.version)
        self.assertEqual(None, page.resource.version)  # FIXME: Intentional?
        self.assertEqual('Bla bla', page.text)
        self.assertEqual(0, page.readonly)
        self.assertEqual('joe', page.author)
        self.assertEqual('Testing', page.comment)
        self.assertEqual(t, page.time)
        history = list(page.get_history())
        self.assertEqual(1, len(history))
        self.assertEqual((1, t, 'joe', 'Testing', '::1'), history[0])
        # Requesting an explicit version pins the resource version.
        page = WikiPage(self.env, 'TestPage', 1)
        self.assertEqual(1, page.resource.version)
    def test_create_page(self):
        page = WikiPage(self.env)
        page.name = 'TestPage'
        page.text = 'Bla bla'
        t = datetime(2001, 1, 1, 1, 1, 1, 0, utc)
        page.save('joe', 'Testing', '::1', t)
        self.assertEqual(True, page.exists)
        self.assertEqual(1, page.version)
        self.assertEqual(1, page.resource.version)
        self.assertEqual(0, page.readonly)
        self.assertEqual('joe', page.author)
        self.assertEqual('Testing', page.comment)
        self.assertEqual(t, page.time)
        self.assertEqual(
            [(1, to_utimestamp(t), 'joe', '::1', 'Bla bla', 'Testing', 0)],
            self.env.db_query("""
                SELECT version, time, author, ipnr, text, comment, readonly
                FROM wiki WHERE name=%s
                """, ('TestPage',)))
        # The change listener must have been told about the new page.
        listener = TestWikiChangeListener(self.env)
        self.assertEqual(page, listener.added[0])
    def test_update_page(self):
        t = datetime(2001, 1, 1, 1, 1, 1, 0, utc)
        t2 = datetime(2002, 1, 1, 1, 1, 1, 0, utc)
        self.env.db_transaction(
            "INSERT INTO wiki VALUES(%s,%s,%s,%s,%s,%s,%s,%s)",
            ('TestPage', 1, to_utimestamp(t), 'joe', '::1', 'Bla bla',
             'Testing', 0))
        page = WikiPage(self.env, 'TestPage')
        page.text = 'Bla'
        page.save('kate', 'Changing', '192.168.0.101', t2)
        self.assertEqual(2, page.version)
        self.assertEqual(2, page.resource.version)
        self.assertEqual(0, page.readonly)
        self.assertEqual('kate', page.author)
        self.assertEqual('Changing', page.comment)
        self.assertEqual(t2, page.time)
        # Both versions must now be present in the database.
        with self.env.db_query as db:
            rows = db("""
                SELECT version, time, author, ipnr, text, comment, readonly
                FROM wiki WHERE name=%s
                """, ('TestPage',))
            self.assertEqual(2, len(rows))
            self.assertEqual((1, to_utimestamp(t), 'joe', '::1', 'Bla bla',
                              'Testing', 0), rows[0])
            self.assertEqual((2, to_utimestamp(t2), 'kate', '192.168.0.101',
                              'Bla', 'Changing', 0), rows[1])
        listener = TestWikiChangeListener(self.env)
        self.assertEqual((page, 2, t2, 'Changing', 'kate', '192.168.0.101'),
                         listener.changed[0])
        # History is returned most recent version first.
        page = WikiPage(self.env, 'TestPage')
        history = list(page.get_history())
        self.assertEqual(2, len(history))
        self.assertEqual((2, t2, 'kate', 'Changing', '192.168.0.101'),
                         history[0])
        self.assertEqual((1, t, 'joe', 'Testing', '::1'), history[1])
    def test_delete_page(self):
        self.env.db_transaction(
            "INSERT INTO wiki VALUES(%s,%s,%s,%s,%s,%s,%s,%s)",
            ('TestPage', 1, 42, 'joe', '::1', 'Bla bla', 'Testing', 0))
        page = WikiPage(self.env, 'TestPage')
        page.delete()
        self.assertEqual(False, page.exists)
        self.assertEqual([], self.env.db_query("""
            SELECT version, time, author, ipnr, text, comment, readonly
            FROM wiki WHERE name=%s
            """, ('TestPage',)))
        listener = TestWikiChangeListener(self.env)
        self.assertEqual(page, listener.deleted[0])
    def test_delete_page_version(self):
        # Deleting one of several versions keeps the page alive.
        self.env.db_transaction.executemany(
            "INSERT INTO wiki VALUES(%s,%s,%s,%s,%s,%s,%s,%s)",
            [('TestPage', 1, 42, 'joe', '::1', 'Bla bla', 'Testing', 0),
             ('TestPage', 2, 43, 'kate', '192.168.0.11', 'Bla', 'Changing', 0)])
        page = WikiPage(self.env, 'TestPage')
        page.delete(version=2)
        self.assertEqual(True, page.exists)
        self.assertEqual(
            [(1, 42, 'joe', '::1', 'Bla bla', 'Testing', 0)],
            self.env.db_query("""
                SELECT version, time, author, ipnr, text, comment, readonly
                FROM wiki WHERE name=%s
                """, ('TestPage',)))
        listener = TestWikiChangeListener(self.env)
        self.assertEqual(page, listener.deleted_version[0])
    def test_delete_page_last_version(self):
        # Deleting the only remaining version deletes the whole page.
        self.env.db_transaction(
            "INSERT INTO wiki VALUES(%s,%s,%s,%s,%s,%s,%s,%s)",
            ('TestPage', 1, 42, 'joe', '::1', 'Bla bla', 'Testing', 0))
        page = WikiPage(self.env, 'TestPage')
        page.delete(version=1)
        self.assertEqual(False, page.exists)
        self.assertEqual([], self.env.db_query("""
            SELECT version, time, author, ipnr, text, comment, readonly
            FROM wiki WHERE name=%s
            """, ('TestPage',)))
        listener = TestWikiChangeListener(self.env)
        self.assertEqual(page, listener.deleted[0])
    def test_rename_page(self):
        data = (1, 42, 'joe', '::1', 'Bla bla', 'Testing', 0)
        self.env.db_transaction(
            "INSERT INTO wiki VALUES(%s,%s,%s,%s,%s,%s,%s,%s)",
            ('TestPage',) + data)
        attachment = Attachment(self.env, 'wiki', 'TestPage')
        attachment.insert('foo.txt', StringIO(), 0, 1)
        page = WikiPage(self.env, 'TestPage')
        page.rename('PageRenamed')
        self.assertEqual('PageRenamed', page.name)
        self.assertEqual([data], self.env.db_query("""
            SELECT version, time, author, ipnr, text, comment, readonly
            FROM wiki WHERE name=%s
            """, ('PageRenamed',)))
        # Attachments must move with the page.
        attachments = Attachment.select(self.env, 'wiki', 'PageRenamed')
        self.assertEqual('foo.txt', attachments.next().filename)
        self.assertRaises(StopIteration, attachments.next)
        Attachment.delete_all(self.env, 'wiki', 'PageRenamed')
        # The old name is gone entirely.
        old_page = WikiPage(self.env, 'TestPage')
        self.assertEqual(False, old_page.exists)
        self.assertEqual([], self.env.db_query("""
            SELECT version, time, author, ipnr, text, comment, readonly
            FROM wiki WHERE name=%s
            """, ('TestPage',)))
        listener = TestWikiChangeListener(self.env)
        self.assertEqual((page, 'TestPage'), listener.renamed[0])
    def test_invalid_page_name(self):
        invalid_names = ('../Page', 'Page/..', 'Page/////SubPage',
                         'Page/./SubPage', '/PagePrefix', 'PageSuffix/')
        # Both save() on a new page and rename() of an existing page must
        # reject invalid names with a TracError.
        for name in invalid_names:
            page = WikiPage(self.env)
            page.name = name
            page.text = 'Bla bla'
            t = datetime(2001, 1, 1, 1, 1, 1, 0, utc)
            self.assertRaises(TracError, page.save, 'joe', 'Testing', '::1', t)
        page = WikiPage(self.env)
        page.name = 'TestPage'
        page.text = 'Bla bla'
        t = datetime(2001, 1, 1, 1, 1, 1, 0, utc)
        page.save('joe', 'Testing', '::1', t)
        for name in invalid_names:
            page = WikiPage(self.env, 'TestPage')
            self.assertRaises(TracError, page.rename, name)
class WikiResourceChangeListenerTestCase(unittest.TestCase):
    """Checks that wiki page operations are reported through the generic
    resource change listener interface.
    """
    # Fixture values used when creating the initial wiki page.
    INITIAL_NAME = "Wiki page 1"
    INITIAL_TEXT = "some text"
    INITIAL_AUTHOR = "anAuthor"
    INITIAL_COMMENT = "some comment"
    INITIAL_REMOTE_ADDRESS = "::1"
    def setUp(self):
        self.env = EnvironmentStub(default_data=True)
        # The listener records the last action; the callback below
        # captures the resource state at notification time.
        self.listener = TestResourceChangeListener(self.env)
        self.listener.resource_type = WikiPage
        self.listener.callback = self.listener_callback
    def tearDown(self):
        self.env.reset_db()
    def test_change_listener_created(self):
        self._create_wiki_page(self.INITIAL_NAME)
        self.assertEqual('created', self.listener.action)
        self.assertTrue(isinstance(self.listener.resource, WikiPage))
        self.assertEqual(self.INITIAL_NAME, self.wiki_name)
        self.assertEqual(self.INITIAL_TEXT, self.wiki_text)
    def test_change_listener_text_changed(self):
        wiki_page = self._create_wiki_page(self.INITIAL_NAME)
        CHANGED_TEXT = "some other text"
        wiki_page.text = CHANGED_TEXT
        wiki_page.save("author1", "renamed_comment", "::2")
        self.assertEqual('changed', self.listener.action)
        self.assertTrue(isinstance(self.listener.resource, WikiPage))
        self.assertEqual(self.INITIAL_NAME, self.wiki_name)
        self.assertEqual(CHANGED_TEXT, self.wiki_text)
        # old_values carries the previous text.
        self.assertEqual({"text":self.INITIAL_TEXT}, self.listener.old_values)
    def test_change_listener_renamed(self):
        wiki_page = self._create_wiki_page(self.INITIAL_NAME)
        CHANGED_NAME = "NewWikiName"
        wiki_page.rename(CHANGED_NAME)
        self.assertEqual('changed', self.listener.action)
        self.assertTrue(isinstance(self.listener.resource, WikiPage))
        self.assertEqual(CHANGED_NAME, self.wiki_name)
        self.assertEqual(self.INITIAL_TEXT, self.wiki_text)
        # old_values carries the previous name.
        self.assertEqual({"name":self.INITIAL_NAME}, self.listener.old_values)
    def test_change_listener_deleted(self):
        wiki_page = self._create_wiki_page(self.INITIAL_NAME)
        wiki_page.delete()
        self.assertEqual('deleted', self.listener.action)
        self.assertTrue(isinstance(self.listener.resource, WikiPage))
        self.assertEqual(self.INITIAL_NAME, self.wiki_name)
    def _create_wiki_page(self, name=None):
        # Create and save a page with the fixture values above.
        name = name or self.INITIAL_NAME
        wiki_page = WikiPage(self.env, name)
        wiki_page.text = self.INITIAL_TEXT
        wiki_page.save(
            self.INITIAL_AUTHOR,
            self.INITIAL_COMMENT,
            self.INITIAL_REMOTE_ADDRESS)
        return wiki_page
    def listener_callback(self, action, resource, context, old_values = None):
        # Snapshot the resource state so the assertions above can inspect
        # what the listener saw at notification time.
        self.wiki_name = resource.name
        self.wiki_text = resource.text
def suite():
    """Assemble all wiki model test cases into one suite."""
    suite = unittest.TestSuite()
    for case in (WikiPageTestCase, WikiResourceChangeListenerTestCase):
        suite.addTest(unittest.makeSuite(case, 'test'))
    return suite
if __name__ == '__main__':
unittest.main(defaultTest='suite')
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/wiki/tests/formatter.py | trac/trac/wiki/tests/formatter.py | import difflib
import os
import re
import unittest
# Python 2.7 `assertMultiLineEqual` calls `safe_repr(..., short=True)`
# which breaks our custom failure display in WikiTestCase.
try:
from unittest.util import safe_repr
unittest.case.safe_repr = lambda obj, short=False: safe_repr(obj, False)
except ImportError:
pass
from datetime import datetime
from trac.core import *
from trac.test import Mock, MockPerm, EnvironmentStub, locale_en
from trac.util.datefmt import utc
from trac.util.html import html
from trac.util.text import strip_line_ws, to_unicode
from trac.web.chrome import web_context
from trac.web.href import Href
from trac.wiki.api import IWikiSyntaxProvider
from trac.wiki.formatter import (HtmlFormatter, InlineHtmlFormatter,
OutlineFormatter)
from trac.wiki.macros import WikiMacroBase
from trac.wiki.model import WikiPage
# We need to supply our own macro because the real macros
# can not be loaded using our 'fake' environment.
class HelloWorldMacro(WikiMacroBase):
    """A dummy macro used by the unit test."""
    def expand_macro(self, formatter, name, content):
        # Plain string concatenation, exactly as the tests expect.
        prefix = 'Hello World, args = '
        return prefix + content
class DivHelloWorldMacro(WikiMacroBase):
    """A dummy macro returning a div block, used by the unit test."""
    def expand_macro(self, formatter, name, content):
        template = '<div>Hello World, args = %s</div>'
        return template % content
class TableHelloWorldMacro(WikiMacroBase):
    """A dummy macro returning a table block, used by the unit test."""
    def expand_macro(self, formatter, name, content):
        template = """
<table><tr><th>Hello World</th><td>%s</td></tr></table>
"""
        return template % content
class DivCodeMacro(WikiMacroBase):
    """A dummy macro returning a div block, used by the unit test."""
    def expand_macro(self, formatter, name, content):
        markup = '<div class="code">Hello World, args = %s</div>'
        return markup % content
class DivCodeElementMacro(WikiMacroBase):
    """A dummy macro returning a Genshi Element, used by the unit test."""
    def expand_macro(self, formatter, name, content):
        element = html.DIV('Hello World, args = ', content, class_="code")
        return element
class DivCodeStreamMacro(WikiMacroBase):
    """A dummy macro returning a Genshi Stream, used by the unit test."""
    def expand_macro(self, formatter, name, content):
        # Imported lazily so Genshi templates are only required when this
        # macro is actually expanded.
        from genshi.template import MarkupTemplate
        tmpl = MarkupTemplate("""
        <div>Hello World, args = $args</div>
        """)
        return tmpl.generate(args=content)
class NoneMacro(WikiMacroBase):
    """A dummy macro returning `None`, used by the unit test."""
    def expand_macro(self, formatter, name, content):
        # A bare return yields None, the value this macro exists to test.
        return
class WikiProcessorSampleMacro(WikiMacroBase):
    def expand_macro(self, formatter, name, content, args):
        # `args` is None for a macro call and a dict of parameters when
        # invoked as a wiki processor.
        if args is not None:
            params = ''.join('<dt>%s</dt><dd>%s</dd>' % kv
                             for kv in args.items())
            return ('Called as a processor with params: <dl>%s</dl>' % params
                    + content)
        return 'Called as a macro: ' + content
class SampleResolver(Component):
    """A dummy `link:` resolver used by the unit test.

    (The previous docstring, "A dummy macro returning a div block", was
    copy-pasted from the macros above and described the wrong thing.)
    """
    implements(IWikiSyntaxProvider)
    def get_wiki_syntax(self):
        # No extra inline syntax, only the link resolver below.
        return []
    def get_link_resolvers(self):
        yield ('link', self._format_link)
    def _format_link(self, formatter, ns, target, label):
        # Numeric targets get an odd/even CSS class and link into
        # 'thing'; non-numeric targets fall back to 'text' / 'stuff'.
        kind, module = 'text', 'stuff'
        try:
            kind = 'odd' if int(target) % 2 else 'even'
            module = 'thing'
        except ValueError:
            pass
        return html.A(label, class_='%s resolver' % kind,
                      href=formatter.href(module, target))
class WikiTestCase(unittest.TestCase):
    """One formatter test case parsed out of a test-case string.

    Renders `self.input` with the formatter returned by `formatter()` and
    compares the result against `self.correct`, producing a readable
    expected/actual/diff dump on failure.
    """
    # Extra keyword arguments passed to formatter.generate() (overridden
    # by subclasses, e.g. EscapeNewLinesTestCase).
    generate_opts = {}
    def __init__(self, title, input, correct, file, line, setup=None,
                 teardown=None, context=None):
        unittest.TestCase.__init__(self, 'test')
        self.title = title
        self.input = input
        self.correct = correct
        # File/line of the test-case definition, for failure messages.
        self.file = file
        self.line = line
        self._setup = setup
        self._teardown = teardown
        req = Mock(href=Href('/'), abs_href=Href('http://www.example.com/'),
                   authname='anonymous', perm=MockPerm(), tz=utc, args={},
                   locale=locale_en, lc_time=locale_en)
        if context:
            # A tuple is interpreted as (realm, id[, version]) arguments.
            if isinstance(context, tuple):
                context = web_context(req, *context)
        else:
            context = web_context(req, 'wiki', 'WikiStart')
        self.context = context
        all_test_components = [
            HelloWorldMacro, DivHelloWorldMacro, TableHelloWorldMacro,
            DivCodeMacro, DivCodeElementMacro, DivCodeStreamMacro,
            NoneMacro, WikiProcessorSampleMacro, SampleResolver]
        self.env = EnvironmentStub(enable=['trac.*'] + all_test_components)
        # -- macros support
        self.env.path = ''
        # -- intertrac support
        self.env.config.set('intertrac', 'trac.title', "Trac's Trac")
        self.env.config.set('intertrac', 'trac.url',
                            "http://trac.edgewall.org")
        self.env.config.set('intertrac', 't', 'trac')
        self.env.config.set('intertrac', 'th.title', "Trac Hacks")
        self.env.config.set('intertrac', 'th.url',
                            "http://trac-hacks.org")
        self.env.config.set('intertrac', 'th.compat', 'false')
        # -- safe schemes
        self.env.config.set('wiki', 'safe_schemes',
                            'file,ftp,http,https,svn,svn+ssh,'
                            'rfc-2396.compatible,rfc-2396+under_score')
        # TODO: remove the following lines in order to discover
        # all the places where we should use the req.href
        # instead of env.href
        self.env.href = req.href
        self.env.abs_href = req.abs_href
    def setUp(self):
        # Every test starts from an environment containing only WikiStart.
        wiki = WikiPage(self.env)
        wiki.name = 'WikiStart'
        wiki.text = '--'
        wiki.save('joe', 'Entry page', '::1', datetime.now(utc))
        if self._setup:
            self._setup(self)
    def tearDown(self):
        self.env.reset_db()
        if self._teardown:
            self._teardown(self)
    def test(self):
        """Testing WikiFormatter"""
        formatter = self.formatter()
        v = unicode(formatter.generate(**self.generate_opts))
        v = v.replace('\r', '').replace(u'\u200b', '')  # FIXME: keep ZWSP
        v = strip_line_ws(v, leading=False)
        try:
            self.assertEquals(self.correct, v)
        except AssertionError, e:
            # Rebuild a readable failure message out of unittest's
            # repr-based "'...' != '...'" message.
            msg = to_unicode(e)
            match = re.match(r"u?'(.*)' != u?'(.*)'", msg)
            if match:
                g1 = ["%s\n" % x for x in match.group(1).split(r'\n')]
                g2 = ["%s\n" % x for x in match.group(2).split(r'\n')]
                expected = ''.join(g1)
                actual = ''.join(g2)
                wiki = repr(self.input).replace(r'\n', '\n')
                diff = ''.join(list(difflib.unified_diff(g1, g2, 'expected',
                                                         'actual')))
                # Tip: sometimes, 'expected' and 'actual' differ only by
                # whitespace, so it can be useful to visualize them, e.g.
                # expected = expected.replace(' ', '.')
                # actual = actual.replace(' ', '.')
                def info(*args):
                    return '\n========== %s: ==========\n%s' % args
                msg = info('expected', expected)
                msg += info('actual', actual)
                msg += info('wiki', ''.join(wiki))
                msg += info('diff', diff)
            raise AssertionError( # See below for details
                '%s\n\n%s:%s: "%s" (%s flavor)' \
                % (msg, self.file, self.line, self.title, formatter.flavor))
    def formatter(self):
        # Subclasses override this to exercise other formatter flavors.
        return HtmlFormatter(self.env, self.context, self.input)
    def shortDescription(self):
        return 'Test ' + self.title
class OneLinerTestCase(WikiTestCase):
def formatter(self):
return InlineHtmlFormatter(self.env, self.context, self.input)
class EscapeNewLinesTestCase(WikiTestCase):
generate_opts = {'escape_newlines': True}
def formatter(self):
return HtmlFormatter(self.env, self.context, self.input)
class OutlineTestCase(WikiTestCase):
def formatter(self):
from StringIO import StringIO
class Outliner(object):
flavor = 'outliner'
def __init__(self, env, context, input):
self.outliner = OutlineFormatter(env, context)
self.input = input
def generate(self):
out = StringIO()
self.outliner.format(self.input, out)
return out.getvalue()
return Outliner(self.env, self.context, self.input)
def suite(data=None, setup=None, file=__file__, teardown=None, context=None):
suite = unittest.TestSuite()
def add_test_cases(data, filename):
tests = re.compile('^(%s.*)$' % ('=' * 30), re.MULTILINE).split(data)
next_line = 1
line = 0
for title, test in zip(tests[1::2], tests[2::2]):
title = title.lstrip('=').strip()
if line != next_line:
line = next_line
if not test or test == '\n':
continue
next_line += len(test.split('\n')) - 1
if 'SKIP' in title or 'WONTFIX' in title:
continue
blocks = test.split('-' * 30 + '\n')
if len(blocks) < 5:
blocks.extend([None,] * (5 - len(blocks)))
input, page, oneliner, page_escape_nl, outline = blocks[:5]
if page:
page = WikiTestCase(
title, input, page, filename, line, setup,
teardown, context)
if oneliner:
oneliner = OneLinerTestCase(
title, input, oneliner[:-1], filename, line, setup,
teardown, context)
if page_escape_nl:
page_escape_nl = EscapeNewLinesTestCase(
title, input, page_escape_nl, filename, line, setup,
teardown, context)
if outline:
outline = OutlineTestCase(
title, input, outline, filename, line, setup,
teardown, context)
for tc in [page, oneliner, page_escape_nl, outline]:
if tc:
suite.addTest(tc)
if data:
add_test_cases(data, file)
else:
for f in ('wiki-tests.txt', 'wikicreole-tests.txt'):
testfile = os.path.join(os.path.split(file)[0], f)
if os.path.exists(testfile):
data = open(testfile, 'r').read().decode('utf-8')
add_test_cases(data, testfile)
else:
print 'no ', testfile
return suite
if __name__ == '__main__':
unittest.main(defaultTest='suite')
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/wiki/tests/__init__.py | trac/trac/wiki/tests/__init__.py | import doctest
import unittest
import trac.wiki.api
import trac.wiki.formatter
import trac.wiki.parser
from trac.wiki.tests import formatter, macros, model, wikisyntax
from trac.wiki.tests.functional import functionalSuite
def suite():
suite = unittest.TestSuite()
suite.addTest(formatter.suite())
suite.addTest(macros.suite())
suite.addTest(model.suite())
suite.addTest(wikisyntax.suite())
suite.addTest(doctest.DocTestSuite(trac.wiki.api))
suite.addTest(doctest.DocTestSuite(trac.wiki.formatter))
suite.addTest(doctest.DocTestSuite(trac.wiki.parser))
return suite
if __name__ == '__main__':
unittest.main(defaultTest='suite')
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/upgrades/db19.py | trac/trac/upgrades/db19.py | from trac.db import Table, Column, Index, DatabaseManager
def do_upgrade(env, ver, cursor):
"""Rename the column `sql` in the `report` table for compatibity with MySQL.
"""
cursor.execute("CREATE TEMPORARY TABLE report_old AS SELECT * FROM report")
cursor.execute("DROP TABLE report")
table = Table('report', key='id')[
Column('id', auto_increment=True),
Column('author'),
Column('title'),
Column('query'),
Column('description')
]
db_connector, _ = DatabaseManager(env).get_connector()
for stmt in db_connector.to_sql(table):
cursor.execute(stmt)
cursor.execute("INSERT INTO report (id,author,title,query,description) "
"SELECT id,author,title,sql,description FROM report_old")
cursor.execute("DROP TABLE report_old")
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/upgrades/db18.py | trac/trac/upgrades/db18.py | from trac.db import Table, Column, Index, DatabaseManager
def do_upgrade(env, ver, cursor):
cursor.execute("CREATE TEMPORARY TABLE session_old AS SELECT * FROM session")
cursor.execute("DROP TABLE session")
cursor.execute("CREATE TEMPORARY TABLE ticket_change_old AS SELECT * FROM ticket_change")
cursor.execute("DROP TABLE ticket_change")
# A more normalized session schema where the attributes are stored in
# a separate table
tables = [Table('session', key=('sid', 'authenticated'))[
Column('sid'),
Column('authenticated', type='int'),
Column('last_visit', type='int'),
Index(['last_visit']),
Index(['authenticated'])],
Table('session_attribute', key=('sid', 'authenticated', 'name'))[
Column('sid'),
Column('authenticated', type='int'),
Column('name'),
Column('value')],
Table('ticket_change', key=('ticket', 'time', 'field'))[
Column('ticket', type='int'),
Column('time', type='int'),
Column('author'),
Column('field'),
Column('oldvalue'),
Column('newvalue'),
Index(['ticket']),
Index(['time'])]]
db_connector, _ = DatabaseManager(env).get_connector()
for table in tables:
for stmt in db_connector.to_sql(table):
cursor.execute(stmt)
# Add an index to the temporary table to speed up the conversion
cursor.execute("CREATE INDEX session_old_sid_idx ON session_old(sid)")
# Insert the sessions into the new table
cursor.execute("""
INSERT INTO session (sid, last_visit, authenticated)
SELECT distinct s.sid,COALESCE(%s,0),s.authenticated
FROM session_old AS s LEFT JOIN session_old AS s2
ON (s.sid=s2.sid AND s2.var_name='last_visit')
WHERE s.sid IS NOT NULL
""" % env.get_read_db().cast('s2.var_value', 'int'))
cursor.execute("""
INSERT INTO session_attribute (sid, authenticated, name, value)
SELECT s.sid, s.authenticated, s.var_name, s.var_value
FROM session_old s
WHERE s.var_name <> 'last_visit' AND s.sid IS NOT NULL
""")
# Insert ticket change data into the new table
cursor.execute("""
INSERT INTO ticket_change (ticket, time, author, field, oldvalue,
newvalue)
SELECT ticket, time, author, field, oldvalue, newvalue
FROM ticket_change_old
""")
cursor.execute("DROP TABLE session_old")
cursor.execute("DROP TABLE ticket_change_old")
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/upgrades/db14.py | trac/trac/upgrades/db14.py | sql = [
"""CREATE TEMPORARY TABLE node_change_old AS SELECT * FROM node_change;""",
"""DROP TABLE node_change;""",
"""CREATE TABLE node_change (
rev text,
path text,
kind char(1),
change char(1),
base_path text,
base_rev text,
UNIQUE(rev, path, change)
);""",
"""INSERT INTO node_change (rev,path,kind,change,base_path,base_rev)
SELECT rev,path,kind,change,base_path,base_rev FROM node_change_old;""",
"""DROP TABLE node_change_old;"""
]
def do_upgrade(env, ver, cursor):
# Wiki pages were accidentially created with the version number starting at
# 0 instead of 1; This should fix that
cursor.execute("SELECT name, version FROM wiki WHERE name IN "
"(SELECT name FROM wiki WHERE version=0) ORDER BY name,"
"version DESC")
result = cursor.fetchall()
if result:
cursor.executemany("UPDATE wiki SET version=version+1 WHERE name=%s "
"and version=%s",
[tuple(row) for row in result])
# Correct difference between db_default.py and upgrades/db10.py: The
# 'change' was missing from the uniqueness constraint
for s in sql:
cursor.execute(s)
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/upgrades/db28.py | trac/trac/upgrades/db28.py | # -*- coding: utf-8 -*-
#
# Copyright (C) 2012 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://trac.edgewall.com/license.html.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://trac.edgewall.org/.
import os.path
from trac.attachment import Attachment
from trac.util.text import exception_to_unicode, printerr, unicode_quote
from trac.util.translation import _
def do_upgrade(env, version, cursor):
"""Move attachments from the `attachments` directory into `files`, hashing
the filenames in the process."""
path = env.path
old_dir = os.path.join(path, 'attachments')
if not os.path.exists(old_dir):
return
old_stat = os.stat(old_dir)
new_dir = os.path.join(path, 'files', 'attachments')
if not os.path.exists(new_dir):
os.makedirs(new_dir)
cursor.execute("""
SELECT type, id, filename FROM attachment ORDER BY type, id
""")
for row in cursor:
move_attachment_file(env, *row)
# Try to preserve permissions and ownerships of the attachments
# directory for $ENV/files
for dir, dirs, files in os.walk(os.path.join(path, 'files')):
try:
if hasattr(os, 'chmod'):
os.chmod(dir, old_stat.st_mode)
if hasattr(os, 'chflags') and hasattr(old_stat, 'st_flags'):
os.chflags(dir, old_stat.st_flags)
if hasattr(os, 'chown'):
os.chown(dir, old_stat.st_uid, old_stat.st_gid)
except OSError:
pass
# Remove empty directory hierarchy
try:
for dir, dirs, files in os.walk(old_dir, topdown=False):
os.rmdir(dir)
except OSError, e:
env.log.warning("Can't delete old attachments directory %s: %s",
old_dir, exception_to_unicode(e))
# TRANSLATOR: Wrap message to 80 columns
printerr(_("""\
The upgrade of attachments was successful, but the old attachments directory:
%(src_dir)s
couldn't be removed, possibly due to the presence of files that weren't
referenced in the database. The error was:
%(exception)s
This error can be ignored, but for keeping your environment clean you should
backup any remaining files in that directory and remove it manually.
""", src_dir=old_dir, exception=exception_to_unicode(e)))
def move_attachment_file(env, parent_realm, parent_id, filename):
old_path = os.path.join(env.path, 'attachments', parent_realm,
unicode_quote(parent_id))
if filename:
old_path = os.path.join(old_path, unicode_quote(filename))
old_path = os.path.normpath(old_path)
if os.path.isfile(old_path):
new_path = Attachment._get_path(env.path, parent_realm, parent_id,
filename)
try:
os.renames(old_path, new_path)
except OSError:
printerr(_("Unable to move attachment from:\n\n"
" %(old_path)s\n\nto:\n\n %(new_path)s\n",
old_path=old_path, new_path=new_path))
raise
else:
env.log.warning("Can't find file for 'attachment:%s:%s:%s', ignoring",
filename, parent_realm, parent_id)
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/upgrades/db16.py | trac/trac/upgrades/db16.py | from trac.db import Table, Column, Index
def do_upgrade(env, ver, cursor):
# Add a few new indices to speed things up
cursor.execute("CREATE INDEX wiki_time_idx ON wiki (time)")
cursor.execute("CREATE INDEX revision_time_idx ON revision (time)")
cursor.execute("CREATE INDEX ticket_status_idx ON ticket (status)")
cursor.execute("CREATE INDEX ticket_time_idx ON ticket (time)")
# Fix missing single column primary key constraints
if env.config.get('trac', 'database').startswith('postgres'):
cursor.execute("ALTER TABLE system ADD CONSTRAINT system_pkey PRIMARY KEY (name)")
cursor.execute("ALTER TABLE revision ADD CONSTRAINT revision_pkey PRIMARY KEY (rev)")
cursor.execute("ALTER TABLE ticket ADD CONSTRAINT ticket_pkey PRIMARY KEY (id)")
cursor.execute("ALTER TABLE component ADD CONSTRAINT component_pkey PRIMARY KEY (name)")
cursor.execute("ALTER TABLE milestone ADD CONSTRAINT milestone_pkey PRIMARY KEY (name)")
cursor.execute("ALTER TABLE version ADD CONSTRAINT version_pkey PRIMARY KEY (name)")
cursor.execute("ALTER TABLE report ADD CONSTRAINT report_pkey PRIMARY KEY (id)")
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/upgrades/db3.py | trac/trac/upgrades/db3.py | sql = """
CREATE TABLE attachment (
type text,
id text,
filename text,
size integer,
time integer,
description text,
author text,
ipnr text,
UNIQUE(type,id,filename)
);
"""
def do_upgrade(env, ver, cursor):
cursor.execute(sql)
env.config.set('attachment', 'max_size', '262144')
env.config.save()
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/upgrades/db29.py | trac/trac/upgrades/db29.py | # -*- coding: utf-8 -*-
#
# Copyright (C) 2012 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://trac.edgewall.com/license.html.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://trac.edgewall.org/.
import shutil
from trac.util import create_unique_file
from trac.util.text import exception_to_unicode
_svn_components = [
'svn_fs.SubversionConnector',
'svn_prop.SubversionMergePropertyDiffRenderer',
'svn_prop.SubversionMergePropertyRenderer',
'svn_prop.SubversionPropertyRenderer',
]
_old_path = 'trac.versioncontrol.'
_new_path = 'tracopt.versioncontrol.svn.'
def do_upgrade(env, version, cursor):
"""Automatically enable tracopt.versioncontrol.svn.* components,
unless they were explicitly disabled or the new svn components are
already enabled.
"""
enable = [c for c in _svn_components
if env.is_component_enabled(_old_path + c) and
not env.is_component_enabled(_new_path + c)]
if not enable:
return
try:
backup, f = create_unique_file(env.config.filename
+ '.tracopt-svn.bak')
f.close()
shutil.copyfile(env.config.filename, backup)
env.log.info("Saved backup of configuration file in %s", backup)
except IOError, e:
env.log.warn("Couldn't save backup of configuration file (%s)",
exception_to_unicode(e))
for c in enable:
env.config.set('components', _new_path + c, 'enabled')
env.config.save()
env.log.info("Enabled components %r to cope with the move from %s to %s.",
enable,
_old_path.replace('.', '/'), _new_path.replace('.', '/'))
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/upgrades/db27.py | trac/trac/upgrades/db27.py | from trac.db import Table, Column, DatabaseManager
def do_upgrade(env, ver, cursor):
"""Modify the cache table to use an integer id."""
# No need to keep the previous content
cursor.execute("DROP TABLE cache")
table = Table('cache', key='id')[
Column('id', type='int'),
Column('generation', type='int'),
Column('key'),
]
db_connector, _ = DatabaseManager(env).get_connector()
for stmt in db_connector.to_sql(table):
cursor.execute(stmt)
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/upgrades/db23.py | trac/trac/upgrades/db23.py | from trac.db import Table, Column, Index, DatabaseManager
def do_upgrade(env, ver, cursor):
# Make changeset cache multi-repository aware
cursor.execute("CREATE TEMPORARY TABLE rev_old "
"AS SELECT * FROM revision")
cursor.execute("DROP TABLE revision")
cursor.execute("CREATE TEMPORARY TABLE nc_old "
"AS SELECT * FROM node_change")
cursor.execute("DROP TABLE node_change")
tables = [Table('repository', key=('id', 'name'))[
Column('id'),
Column('name'),
Column('value')],
Table('revision', key=('repos', 'rev'))[
Column('repos'),
Column('rev', key_size=20),
Column('time', type='int'),
Column('author'),
Column('message'),
Index(['repos', 'time'])],
Table('node_change', key=('repos', 'rev', 'path', 'change_type'))[
Column('repos', key_size=56),
Column('rev', key_size=20),
Column('path', key_size=255),
Column('node_type', size=1),
Column('change_type', size=1, key_size=2),
Column('base_path'),
Column('base_rev'),
Index(['repos', 'rev'])]]
db_connector, _ = DatabaseManager(env)._get_connector()
for table in tables:
for stmt in db_connector.to_sql(table):
cursor.execute(stmt)
cursor.execute("INSERT INTO revision (repos,rev,time,author,message) "
"SELECT '',rev,time,author,message FROM rev_old")
cursor.execute("DROP TABLE rev_old")
cursor.execute("INSERT INTO node_change (repos,rev,path,node_type,"
"change_type,base_path,base_rev) "
"SELECT '',rev,path,node_type,change_type,base_path,"
"base_rev FROM nc_old")
cursor.execute("DROP TABLE nc_old")
cursor.execute("INSERT INTO repository (id,name,value) "
"SELECT '',name,value FROM system "
"WHERE name IN ('repository_dir', 'youngest_rev')")
cursor.execute("DELETE FROM system "
"WHERE name IN ('repository_dir', 'youngest_rev')")
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/upgrades/db26.py | trac/trac/upgrades/db26.py |
def do_upgrade(env, ver, cursor):
"""Zero-pad Subversion revision numbers in the cache."""
cursor.execute("""
SELECT id, value FROM repository WHERE name='repository_dir'
""")
for id in [id for id, dir in cursor if dir.startswith('svn:')]:
cursor.execute("SELECT DISTINCT rev FROM revision WHERE repos=%s",
(id,))
for rev in set(row[0] for row in cursor):
cursor.execute("""
UPDATE revision SET rev=%s WHERE repos=%s AND rev=%s
""", ('%010d' % int(rev), id, rev))
cursor.execute("SELECT DISTINCT rev FROM node_change WHERE repos=%s",
(id,))
for rev in set(row[0] for row in cursor):
cursor.execute("""
UPDATE node_change SET rev=%s WHERE repos=%s AND rev=%s
""", ('%010d' % int(rev), id, rev))
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/upgrades/db7.py | trac/trac/upgrades/db7.py | sql = [
#-- Add readonly flag to 'wiki'
"""CREATE TEMPORARY TABLE wiki_old AS SELECT * FROM wiki;""",
"""DROP TABLE wiki;""",
"""CREATE TABLE wiki (
name text,
version integer,
time integer,
author text,
ipnr text,
text text,
comment text,
readonly integer,
UNIQUE(name,version)
);""",
"""INSERT INTO wiki(name,version,time,author,ipnr,text,comment,readonly) SELECT name,version,time,author,ipnr,text,comment,0 FROM wiki_old;"""
]
def do_upgrade(env, ver, cursor):
for s in sql:
cursor.execute(s)
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/upgrades/db6.py | trac/trac/upgrades/db6.py | sql = """
CREATE TABLE ticket_custom (
ticket integer,
name text,
value text,
UNIQUE(ticket,name)
);
"""
def do_upgrade(env, ver, cursor):
cursor.execute(sql)
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/upgrades/db24.py | trac/trac/upgrades/db24.py | from trac.db import Table, Column, Index, DatabaseManager
def do_upgrade(env, ver, cursor):
# Change repository key from reponame to a surrogate id
cursor.execute("SELECT id FROM repository "
"UNION SELECT repos AS id FROM revision "
"UNION SELECT repos AS id FROM node_change "
"ORDER BY id")
id_name_list = [(i + 1, name) for i, (name,) in enumerate(cursor)]
cursor.execute("CREATE TEMPORARY TABLE repo_old "
"AS SELECT * FROM repository")
cursor.execute("DROP TABLE repository")
cursor.execute("CREATE TEMPORARY TABLE rev_old "
"AS SELECT * FROM revision")
cursor.execute("DROP TABLE revision")
cursor.execute("CREATE TEMPORARY TABLE nc_old "
"AS SELECT * FROM node_change")
cursor.execute("DROP TABLE node_change")
tables = [Table('repository', key=('id', 'name'))[
Column('id', type='int'),
Column('name'),
Column('value')],
Table('revision', key=('repos', 'rev'))[
Column('repos', type='int'),
Column('rev', key_size=20),
Column('time', type='int'),
Column('author'),
Column('message'),
Index(['repos', 'time'])],
Table('node_change', key=('repos', 'rev', 'path', 'change_type'))[
Column('repos', type='int'),
Column('rev', key_size=20),
Column('path', key_size=255),
Column('node_type', size=1),
Column('change_type', size=1, key_size=2),
Column('base_path'),
Column('base_rev'),
Index(['repos', 'rev'])]]
db_connector, _ = DatabaseManager(env)._get_connector()
for table in tables:
for stmt in db_connector.to_sql(table):
cursor.execute(stmt)
cursor.executemany("INSERT INTO repository (id,name,value) "
"VALUES (%s,'name',%s)", id_name_list)
cursor.executemany("INSERT INTO repository (id,name,value) "
"SELECT %s,name,value FROM repo_old WHERE id=%s",
id_name_list)
cursor.execute("DROP TABLE repo_old")
cursor.executemany("INSERT INTO revision (repos,rev,time,author,message) "
"SELECT %s,rev,time,author,message FROM rev_old "
"WHERE repos=%s", id_name_list)
cursor.execute("DROP TABLE rev_old")
cursor.executemany("INSERT INTO node_change (repos,rev,path,node_type,"
" change_type,base_path,base_rev) "
"SELECT %s,rev,path,node_type,change_type,base_path,"
" base_rev FROM nc_old WHERE repos=%s", id_name_list)
cursor.execute("DROP TABLE nc_old")
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/upgrades/db17.py | trac/trac/upgrades/db17.py | from trac.db import Table, Column, Index, DatabaseManager
def do_upgrade(env, ver, cursor):
"""Rename the columns `kind` and `change` in the `node_change` table for
compatibity with MySQL.
"""
cursor.execute("CREATE TEMPORARY TABLE nc_old AS SELECT * FROM node_change")
cursor.execute("DROP TABLE node_change")
table = Table('node_change', key=('rev', 'path', 'change_type'))[
Column('rev'),
Column('path'),
Column('node_type', size=1),
Column('change_type', size=1),
Column('base_path'),
Column('base_rev'),
Index(['rev'])
]
db_connector, _ = DatabaseManager(env).get_connector()
for stmt in db_connector.to_sql(table):
cursor.execute(stmt)
cursor.execute("INSERT INTO node_change (rev,path,node_type,change_type,"
"base_path,base_rev) SELECT rev,path,kind,change,"
"base_path,base_rev FROM nc_old")
cursor.execute("DROP TABLE nc_old")
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/upgrades/db5.py | trac/trac/upgrades/db5.py | sql = [
#-- Add unique id, descr to 'milestone'
"""CREATE TEMPORARY TABLE milestone_old AS SELECT * FROM milestone;""",
"""DROP TABLE milestone;""",
"""CREATE TABLE milestone (
id integer PRIMARY KEY,
name text,
time integer,
descr text,
UNIQUE(name)
);""",
"""
INSERT INTO milestone(name,time, descr) SELECT name,time,'' FROM milestone_old;""",
"""DROP TABLE milestone_old;""",
]
def do_upgrade(env, ver, cursor):
for s in sql:
cursor.execute(s)
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/upgrades/db13.py | trac/trac/upgrades/db13.py | sql = [
#-- Add ticket_type to 'ticket', remove the unused 'url' column
"""CREATE TEMPORARY TABLE ticket_old AS SELECT * FROM ticket;""",
"""DROP TABLE ticket;""",
"""CREATE TABLE ticket (
id integer PRIMARY KEY,
type text, -- the nature of the ticket
time integer, -- the time it was created
changetime integer,
component text,
severity text,
priority text,
owner text, -- who is this ticket assigned to
reporter text,
cc text, -- email addresses to notify
version text, --
milestone text, --
status text,
resolution text,
summary text, -- one-line summary
description text, -- problem description (long)
keywords text
);""",
"""INSERT INTO ticket(id, type, time, changetime, component, severity, priority,
owner, reporter, cc, version, milestone, status, resolution,
summary, description, keywords)
SELECT id, 'defect', time, changetime, component, severity, priority, owner,
reporter, cc, version, milestone, status, resolution, summary,
description, keywords FROM ticket_old
WHERE COALESCE(severity,'') <> 'enhancement';""",
"""INSERT INTO ticket(id, type, time, changetime, component, severity, priority,
owner, reporter, cc, version, milestone, status, resolution,
summary, description, keywords)
SELECT id, 'enhancement', time, changetime, component, 'normal', priority,
owner, reporter, cc, version, milestone, status, resolution, summary,
description, keywords FROM ticket_old
WHERE severity = 'enhancement';""",
"""INSERT INTO enum (type, name, value) VALUES ('ticket_type', 'defect', '1');""",
"""INSERT INTO enum (type, name, value) VALUES ('ticket_type', 'enhancement', '2');""",
"""INSERT INTO enum (type, name, value) VALUES ('ticket_type', 'task', '3');""",
"""DELETE FROM enum WHERE type = 'severity' AND name = 'enhancement';""",
"""DROP TABLE ticket_old;""",
]
def do_upgrade(env, ver, cursor):
for s in sql:
cursor.execute(s)
# -- upgrade reports (involve a rename)
cursor.execute("SELECT id,sql FROM report")
reports = {}
for id, rsql in cursor:
reports[id] = rsql
for id, rsql in reports.items():
parts = rsql.split('ORDER BY', 1)
ending = len(parts)>1 and 'ORDER BY'+parts[1] or ''
cursor.execute("UPDATE report SET sql=%s WHERE id=%s",
(parts[0].replace('severity,',
't.type AS type, severity,') + ending,
id))
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/upgrades/db21.py | trac/trac/upgrades/db21.py |
def do_upgrade(env, ver, cursor):
"""Upgrade the reports to better handle the new workflow capabilities"""
owner = env.get_read_db().concat('owner', "' *'")
cursor.execute('SELECT id, query, description FROM report')
reports = cursor.fetchall()
for report, query, description in reports:
q, d = query, description
if query:
# All states other than 'closed' are "active".
q = q.replace("IN ('new', 'assigned', 'reopened')", "<> 'closed'")
# Add a status column instead of adding an '*' to the owner's name
# for the 'assigned' state.
q = q.replace("(CASE status WHEN 'assigned' THEN %s "
"ELSE owner END) AS owner" % owner, "owner, status")
if description:
d = d.replace(" * If a ticket has been accepted, a '*' is"
" appended after the owner's name\n", '')
if q != query or d != description:
cursor.execute("""
UPDATE report SET query=%s, description=%s WHERE id=%s
""", (q, d, report))
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/upgrades/db20.py | trac/trac/upgrades/db20.py | from trac.versioncontrol.cache import CACHE_YOUNGEST_REV
def do_upgrade(env, ver, cursor):
"""Modify the repository cache scheme (if needed)
Now we use the 'youngest_rev' entry in the system table
to explicitly store the youngest rev in the cache.
"""
youngest = ''
cursor.execute("SELECT value FROM system WHERE name='repository_dir'")
for repository_dir, in cursor:
if repository_dir.startswith('svn:'):
cursor.execute("SELECT rev FROM revision "
"ORDER BY -LENGTH(rev), rev DESC LIMIT 1")
row = cursor.fetchone()
youngest = row and row[0] or ''
else:
print 'Please perform a "repository resync" after this upgrade.'
# deleting first, for the 0.11dev and 0.10.4dev users
cursor.execute("DELETE FROM system WHERE name=%s",
(CACHE_YOUNGEST_REV,))
cursor.execute("INSERT INTO system (name, value) VALUES (%s, %s)",
(CACHE_YOUNGEST_REV, youngest))
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/upgrades/db9.py | trac/trac/upgrades/db9.py | import time
sql = [
#-- Remove the unused lock table
"""DROP TABLE lock;""",
#-- Separate anonymous from authenticated sessions.
"""CREATE TEMPORARY TABLE session_old AS SELECT * FROM session;""",
"""DELETE FROM session;""",
"""INSERT INTO session (username,var_name,var_value)
SELECT username,var_name,var_value FROM session_old
WHERE sid IN (SELECT DISTINCT sid FROM session_old
WHERE username!='anonymous' AND var_name='last_visit'
GROUP BY username ORDER BY var_value DESC);""",
"""INSERT INTO session (sid,username,var_name,var_value)
SELECT sid,username,var_name,var_value FROM session_old
WHERE username='anonymous';""",
"""DROP TABLE session_old;"""
]
def do_upgrade(env, ver, cursor):
for s in sql:
cursor.execute(s)
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/upgrades/db11.py | trac/trac/upgrades/db11.py | sql = [
#-- Remove empty values from the milestone list
"""DELETE FROM milestone WHERE COALESCE(name,'')='';""",
#-- Add a description column to the version table, and remove unnamed versions
"""CREATE TEMPORARY TABLE version_old AS SELECT * FROM version;""",
"""DROP TABLE version;""",
"""CREATE TABLE version (
name text PRIMARY KEY,
time integer,
description text
);""",
"""INSERT INTO version(name,time,description)
SELECT name,time,'' FROM version_old WHERE COALESCE(name,'')<>'';""",
#-- Add a description column to the component table, and remove unnamed components
"""CREATE TEMPORARY TABLE component_old AS SELECT * FROM component;""",
"""DROP TABLE component;""",
"""CREATE TABLE component (
name text PRIMARY KEY,
owner text,
description text
);""",
"""INSERT INTO component(name,owner,description)
SELECT name,owner,'' FROM component_old WHERE COALESCE(name,'')<>'';""",
"""DROP TABLE version_old;""",
"""DROP TABLE component_old;"""
]
def do_upgrade(env, ver, cursor):
for s in sql:
cursor.execute(s)
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/upgrades/db4.py | trac/trac/upgrades/db4.py | sql = [
"""CREATE TABLE session (
sid text,
username text,
var_name text,
var_value text,
UNIQUE(sid,var_name)
);""",
"""CREATE INDEX session_idx ON session(sid,var_name);"""
]
def do_upgrade(env, ver, cursor):
for s in sql:
cursor.execute(s)
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/upgrades/__init__.py | trac/trac/upgrades/__init__.py | python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false | |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/upgrades/db10.py | trac/trac/upgrades/db10.py | sql = [
#-- Make the node_change table contain more information, and force a resync
"""DROP TABLE revision;""",
"""DROP TABLE node_change;""",
"""CREATE TABLE revision (
rev text PRIMARY KEY,
time integer,
author text,
message text
);""",
"""CREATE TABLE node_change (
rev text,
path text,
kind char(1), -- 'D' for directory, 'F' for file
change char(1),
base_path text,
base_rev text,
UNIQUE(rev, path, change)
);"""
]
def do_upgrade(env, ver, cursor):
for s in sql:
cursor.execute(s)
print 'Please perform a "resync" after this upgrade.'
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/upgrades/db25.py | trac/trac/upgrades/db25.py | from trac.db import DatabaseManager
def do_upgrade(env, ver, cursor):
"""Convert time values from integer seconds to integer microseconds."""
tables = [
('attachment', {'time': ('int', 'int64')}),
('wiki', {'time': ('int', 'int64')}),
('revision', {'time': ('int', 'int64')}),
('ticket', {'time': ('int', 'int64'),
'changetime': ('int', 'int64')}),
('ticket_change', {'time': ('int', 'int64')}),
('milestone', {'due': ('int', 'int64'),
'completed': ('int', 'int64')}),
('version', {'time': ('int', 'int64')}),
]
db_connector, _ = DatabaseManager(env).get_connector()
for table, columns in tables:
# Alter column types
for sql in db_connector.alter_column_types(table, columns):
cursor.execute(sql)
# Convert timestamps to microseconds
cursor.execute("UPDATE %s SET %s" % (table,
', '.join("%s=%s*1000000" % (column, column)
for column in columns)))
# Convert comment edit timestamps to microseconds
db = env.get_read_db()
cursor.execute("""
UPDATE ticket_change SET newvalue=%s*1000000
WHERE field %s""" % (db.cast('newvalue', 'int64'), db.like()),
('_comment%',))
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/upgrades/db15.py | trac/trac/upgrades/db15.py | from trac.db import Table, Column, Index, DatabaseManager
def do_upgrade(env, ver, cursor):
cursor.execute("""
CREATE TEMPORARY TABLE session_old AS SELECT * FROM session
""")
cursor.execute("DROP TABLE session")
session_table = Table('session', key=('sid', 'authenticated', 'var_name'))[
Column('sid'),
Column('authenticated', type='int'),
Column('var_name'),
Column('var_value')]
db_backend, _ = DatabaseManager(env).get_connector()
for stmt in db_backend.to_sql(session_table):
cursor.execute(stmt)
cursor.execute("""
INSERT INTO session (sid,authenticated,var_name,var_value)
SELECT sid,authenticated,var_name,var_value FROM session_old
""")
cursor.execute("DROP TABLE session_old")
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/upgrades/db22.py | trac/trac/upgrades/db22.py | from trac.db import Table, Column, DatabaseManager
def do_upgrade(env, ver, cursor):
"""Add the cache table."""
table = Table('cache', key='id')[
Column('id'),
Column('generation', type='int')
]
db_connector, _ = DatabaseManager(env).get_connector()
for stmt in db_connector.to_sql(table):
cursor.execute(stmt)
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/upgrades/db8.py | trac/trac/upgrades/db8.py | import time
d = {'now':time.time()}
sql = [
#-- Separate between due and completed time for milestones.
"""CREATE TEMPORARY TABLE milestone_old AS SELECT * FROM milestone;""",
"""DROP TABLE milestone;""",
"""CREATE TABLE milestone (
name text PRIMARY KEY,
due integer, -- Due date/time
completed integer, -- Completed date/time
description text
);""",
"""INSERT INTO milestone(name,due,completed,description)
SELECT name,time,time,descr FROM milestone_old WHERE time <= %(now)s;""" % d,
"""INSERT INTO milestone(name,due,description)
SELECT name,time,descr FROM milestone_old WHERE time > %(now)s;""" % d
]
def do_upgrade(env, ver, cursor):
for s in sql:
cursor.execute(s)
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/upgrades/db12.py | trac/trac/upgrades/db12.py | sql = [
#-- Some anonymous session might have been left over
"""DELETE FROM session WHERE username='anonymous';""",
#-- Schema change: use an authenticated flag instead of separate sid/username
#-- columns
"""CREATE TEMPORARY TABLE session_old AS SELECT * FROM session;""",
"""DROP TABLE session;""",
"""CREATE TABLE session (
sid text,
authenticated int,
var_name text,
var_value text,
UNIQUE(sid, var_name)
);""",
"""INSERT INTO session(sid,authenticated,var_name,var_value)
SELECT DISTINCT sid,0,var_name,var_value FROM session_old
WHERE sid IS NULL;""",
"""INSERT INTO session(sid,authenticated,var_name,var_value)
SELECT DISTINCT username,1,var_name,var_value FROM session_old
WHERE sid IS NULL;""",
"""DROP TABLE session_old;"""
]
def do_upgrade(env, ver, cursor):
for s in sql:
cursor.execute(s)
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/search/api.py | trac/trac/search/api.py | # -*- coding: utf-8 -*-
#
# Copyright (C) 2003-2009 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://trac.edgewall.org/wiki/TracLicense.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://trac.edgewall.org/log/.
import re
from trac.core import *
class ISearchSource(Interface):
"""Extension point interface for adding search sources to the search
system.
"""
def get_search_filters(req):
"""Return a list of filters that this search source supports.
Each filter must be a `(name, label[, default])` tuple, where `name` is
the internal name, `label` is a human-readable name for display and
`default` is an optional boolean for determining whether this filter
is searchable by default.
"""
def get_search_results(req, terms, filters):
"""Return a list of search results matching each search term in `terms`.
The `filters` parameters is a list of the enabled filters, each item
being the name of the tuples returned by `get_search_events`.
The events returned by this function must be tuples of the form
`(href, title, date, author, excerpt).`
"""
def search_to_sql(db, columns, terms):
"""Convert a search query into an SQL WHERE clause and corresponding
parameters.
The result is returned as an `(sql, params)` tuple.
"""
assert columns and terms
likes = ['%s %s' % (i, db.like()) for i in columns]
c = ' OR '.join(likes)
sql = '(' + ') AND ('.join([c] * len(terms)) + ')'
args = []
for t in terms:
args.extend(['%' + db.like_escape(t) + '%'] * len(columns))
return sql, tuple(args)
def search_to_regexps(terms):
"""Convert search query terms into regular expressions."""
return [re.compile(re.escape(term)) for term in terms]
def shorten_result(text='', keywords=[], maxlen=240, fuzz=60):
if not text:
text = ''
text_low = text.lower()
beg = -1
for k in keywords:
i = text_low.find(k.lower())
if (i > -1 and i < beg) or beg == -1:
beg = i
excerpt_beg = 0
if beg > fuzz:
for sep in ('.', ':', ';', '='):
eb = text.find(sep, beg - fuzz, beg - 1)
if eb > -1:
eb += 1
break
else:
eb = beg - fuzz
excerpt_beg = eb
if excerpt_beg < 0:
excerpt_beg = 0
msg = text[excerpt_beg:beg+maxlen]
if beg > fuzz:
msg = '... ' + msg
if beg < len(text)-maxlen:
msg = msg + ' ...'
return msg
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/search/web_ui.py | trac/trac/search/web_ui.py | # -*- coding: utf-8 -*-
#
# Copyright (C) 2003-2009 Edgewall Software
# Copyright (C) 2003-2004 Jonas Borgström <jonas@edgewall.com>
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://trac.edgewall.org/wiki/TracLicense.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://trac.edgewall.org/log/.
#
# Author: Jonas Borgström <jonas@edgewall.com>
import pkg_resources
import re
from genshi.builder import tag
from trac.config import IntOption, ListOption
from trac.core import *
from trac.mimeview import RenderingContext
from trac.perm import IPermissionRequestor
from trac.search.api import ISearchSource
from trac.util.datefmt import format_datetime, user_time
from trac.util.html import find_element
from trac.util.presentation import Paginator
from trac.util.text import quote_query_string
from trac.util.translation import _
from trac.web import IRequestHandler
from trac.web.chrome import (INavigationContributor, ITemplateProvider,
add_link, add_stylesheet, add_warning,
web_context)
from trac.wiki.api import IWikiSyntaxProvider
from trac.wiki.formatter import extract_link
class SearchModule(Component):
implements(INavigationContributor, IPermissionRequestor, IRequestHandler,
ITemplateProvider, IWikiSyntaxProvider)
search_sources = ExtensionPoint(ISearchSource)
RESULTS_PER_PAGE = 10
min_query_length = IntOption('search', 'min_query_length', 3,
"""Minimum length of query string allowed when performing a search.""")
default_disabled_filters = ListOption('search', 'default_disabled_filters',
doc="""Specifies which search filters should be disabled by
default on the search page. This will also restrict the
filters for the quick search function. The filter names
defined by default components are: `wiki`, `ticket`,
`milestone` and `changeset`. For plugins, look for
their implementation of the ISearchSource interface, in
the `get_search_filters()` method, the first member of
returned tuple. Once disabled, search filters can still
be manually enabled by the user on the search page.
(since 0.12)""")
# INavigationContributor methods
def get_active_navigation_item(self, req):
return 'search'
def get_navigation_items(self, req):
if 'SEARCH_VIEW' in req.perm:
yield ('mainnav', 'search',
tag.a(_('Search'), href=req.href.search(), accesskey=4))
# IPermissionRequestor methods
def get_permission_actions(self):
return ['SEARCH_VIEW']
# IRequestHandler methods
def match_request(self, req):
return re.match(r'/search(?:/opensearch)?$', req.path_info) is not None
def process_request(self, req):
req.perm.assert_permission('SEARCH_VIEW')
if req.path_info == '/search/opensearch':
return ('opensearch.xml', {},
'application/opensearchdescription+xml')
query = req.args.get('q')
available_filters = []
for source in self.search_sources:
available_filters.extend(source.get_search_filters(req) or [])
available_filters.sort(key=lambda f: f[1].lower())
filters = self._get_selected_filters(req, available_filters)
data = self._prepare_data(req, query, available_filters, filters)
if query:
data['quickjump'] = self._check_quickjump(req, query)
if query.startswith('!'):
query = query[1:]
terms = self._parse_query(req, query)
if terms:
results = self._do_search(req, terms, filters)
if results:
data.update(self._prepare_results(req, filters, results))
add_stylesheet(req, 'common/css/search.css')
return 'search.html', data, None
# ITemplateProvider methods
def get_htdocs_dirs(self):
return []
def get_templates_dirs(self):
return [pkg_resources.resource_filename('trac.search', 'templates')]
# IWikiSyntaxProvider methods
def get_wiki_syntax(self):
return []
def get_link_resolvers(self):
yield ('search', self._format_link)
def _format_link(self, formatter, ns, target, label):
path, query, fragment = formatter.split_link(target)
if path:
href = formatter.href.search(q=path)
if query:
href += '&' + quote_query_string(query[1:])
else:
href = formatter.href.search() + quote_query_string(query)
href += fragment
return tag.a(label, class_='search', href=href)
# IRequestHandler helper methods
def _get_selected_filters(self, req, available_filters):
"""Return selected filters or the default filters if none was selected.
"""
filters = [f[0] for f in available_filters if f[0] in req.args]
if not filters:
filters = [f[0] for f in available_filters
if f[0] not in self.default_disabled_filters and
(len(f) < 3 or len(f) > 2 and f[2])]
return filters
def _prepare_data(self, req, query, available_filters, filters):
return {'filters': [{'name': f[0], 'label': f[1],
'active': f[0] in filters}
for f in available_filters],
'query': query, 'quickjump': None, 'results': []}
def _check_quickjump(self, req, kwd):
"""Look for search shortcuts"""
noquickjump = int(req.args.get('noquickjump', '0'))
# Source quickjump FIXME: delegate to ISearchSource.search_quickjump
quickjump_href = None
if kwd[0] == '/':
quickjump_href = req.href.browser(kwd)
name = kwd
description = _('Browse repository path %(path)s', path=kwd)
else:
context = web_context(req, 'search')
link = find_element(extract_link(self.env, context, kwd), 'href')
if link is not None:
quickjump_href = link.attrib.get('href')
name = link.children
description = link.attrib.get('title', '')
if quickjump_href:
# Only automatically redirect to local quickjump links
if not quickjump_href.startswith(req.base_path or '/'):
noquickjump = True
if noquickjump:
return {'href': quickjump_href, 'name': tag.EM(name),
'description': description}
else:
req.redirect(quickjump_href)
def _get_search_terms(self, query):
"""Break apart a search query into its various search terms.
Terms are grouped implicitly by word boundary, or explicitly by (single
or double) quotes.
"""
terms = []
for term in re.split('(".*?")|(\'.*?\')|(\s+)', query):
if term is not None and term.strip():
if term[0] == term[-1] and term[0] in "'\"":
term = term[1:-1]
terms.append(term)
return terms
def _parse_query(self, req, query):
"""Parse query and refuse those which would result in a huge result set
"""
terms = self._get_search_terms(query)
if terms and (len(terms) > 1 or
len(terms[0]) >= self.min_query_length):
return terms
add_warning(req, _('Search query too short. '
'Query must be at least %(num)s characters long.',
num=self.min_query_length))
def _do_search(self, req, terms, filters):
results = []
for source in self.search_sources:
results.extend(source.get_search_results(req, terms, filters)
or [])
return sorted(results, key=lambda x: x[2], reverse=True)
def _prepare_results(self, req, filters, results):
page = int(req.args.get('page', '1'))
results = Paginator(results, page - 1, self.RESULTS_PER_PAGE)
for idx, result in enumerate(results):
results[idx] = {'href': result[0], 'title': result[1],
'date': user_time(req, format_datetime, result[2]),
'author': result[3], 'excerpt': result[4]}
pagedata = []
shown_pages = results.get_shown_pages(21)
for shown_page in shown_pages:
page_href = req.href.search([(f, 'on') for f in filters],
q=req.args.get('q'),
page=shown_page, noquickjump=1)
pagedata.append([page_href, None, str(shown_page),
'page ' + str(shown_page)])
fields = ['href', 'class', 'string', 'title']
results.shown_pages = [dict(zip(fields, p)) for p in pagedata]
results.current_page = {'href': None, 'class': 'current',
'string': str(results.page + 1),
'title':None}
if results.has_next_page:
next_href = req.href.search(zip(filters, ['on'] * len(filters)),
q=req.args.get('q'), page=page + 1,
noquickjump=1)
add_link(req, 'next', next_href, _('Next Page'))
if results.has_previous_page:
prev_href = req.href.search(zip(filters, ['on'] * len(filters)),
q=req.args.get('q'), page=page - 1,
noquickjump=1)
add_link(req, 'prev', prev_href, _('Previous Page'))
page_href = req.href.search(
zip(filters, ['on'] * len(filters)), q=req.args.get('q'),
noquickjump=1)
return {'results': results, 'page_href': page_href}
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/search/__init__.py | trac/trac/search/__init__.py | from trac.search.api import * | python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/timeline/api.py | trac/trac/timeline/api.py | # -*- coding: utf-8 -*-
#
# Copyright (C) 2003-2009 Edgewall Software
# Copyright (C) 2003-2005 Jonas Borgström <jonas@edgewall.com>
# Copyright (C) 2004-2005 Christopher Lenz <cmlenz@gmx.de>
# Copyright (C) 2005-2006 Christian Boos <cboos@edgewall.org>
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://trac.edgewall.org/wiki/TracLicense.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://trac.edgewall.org/log/.
#
# Author: Jonas Borgström <jonas@edgewall.com>
# Christopher Lenz <cmlenz@gmx.de>
from trac.core import *
class ITimelineEventProvider(Interface):
"""Extension point interface for adding sources for timed events to the
timeline.
"""
def get_timeline_filters(req):
"""Return a list of filters that this event provider supports.
Each filter must be a (name, label) tuple, where `name` is the internal
name, and `label` is a human-readable name for display.
Optionally, the tuple can contain a third element, `checked`.
If `checked` is omitted or True, the filter is active by default,
otherwise it will be inactive.
"""
def get_timeline_events(req, start, stop, filters):
"""Return a list of events in the time range given by the `start` and
`stop` parameters.
The `filters` parameters is a list of the enabled filters, each item
being the name of the tuples returned by `get_timeline_filters`.
Since 0.11, the events are `(kind, date, author, data)` tuples,
where `kind` is a string used for categorizing the event, `date`
is a `datetime` object, `author` is a string and `data` is some
private data that the component will reuse when rendering the event.
When the event has been created indirectly by another module,
like this happens when calling `AttachmentModule.get_timeline_events()`
the tuple can also specify explicitly the provider by returning tuples
of the following form: `(kind, date, author, data, provider)`.
Before version 0.11, the events returned by this function used to
be tuples of the form `(kind, href, title, date, author, markup)`.
This is still supported but less flexible, as `href`, `title` and
`markup` are not context dependent.
"""
def render_timeline_event(context, field, event):
"""Display the title of the event in the given context.
:param context: the `RenderingContext` object that can be used for
rendering
:param field: what specific part information from the event should
be rendered: can be the 'title', the 'description' or
the 'url'
:param event: the event tuple, as returned by `get_timeline_events`
"""
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/timeline/web_ui.py | trac/trac/timeline/web_ui.py | # -*- coding: utf-8 -*-
#
# Copyright (C) 2003-2009 Edgewall Software
# Copyright (C) 2003-2005 Jonas Borgström <jonas@edgewall.com>
# Copyright (C) 2004-2005 Christopher Lenz <cmlenz@gmx.de>
# Copyright (C) 2005-2006 Christian Boos <cboos@edgewall.org>
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://trac.edgewall.org/wiki/TracLicense.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://trac.edgewall.org/log/.
#
# Author: Jonas Borgström <jonas@edgewall.com>
# Christopher Lenz <cmlenz@gmx.de>
from datetime import datetime, timedelta
import pkg_resources
import re
from genshi.builder import tag
from trac.config import IntOption, BoolOption
from trac.core import *
from trac.perm import IPermissionRequestor
from trac.timeline.api import ITimelineEventProvider
from trac.util import as_int
from trac.util.datefmt import format_date, format_datetime, format_time, \
parse_date, to_utimestamp, to_datetime, utc, \
pretty_timedelta, user_time
from trac.util.text import exception_to_unicode, to_unicode
from trac.util.translation import _, tag_
from trac.web import IRequestHandler, IRequestFilter
from trac.web.chrome import (Chrome, INavigationContributor, ITemplateProvider,
add_link, add_stylesheet, auth_link, prevnext_nav,
web_context)
from trac.wiki.api import IWikiSyntaxProvider
from trac.wiki.formatter import concat_path_query_fragment, \
split_url_into_path_query_fragment
class TimelineModule(Component):
implements(INavigationContributor, IPermissionRequestor, IRequestHandler,
IRequestFilter, ITemplateProvider, IWikiSyntaxProvider)
event_providers = ExtensionPoint(ITimelineEventProvider)
default_daysback = IntOption('timeline', 'default_daysback', 30,
"""Default number of days displayed in the Timeline, in days.
(''since 0.9.'')""")
max_daysback = IntOption('timeline', 'max_daysback', 90,
"""Maximum number of days (-1 for unlimited) displayable in the
Timeline. (''since 0.11'')""")
abbreviated_messages = BoolOption('timeline', 'abbreviated_messages',
True,
"""Whether wiki-formatted event messages should be truncated or not.
This only affects the default rendering, and can be overriden by
specific event providers, see their own documentation.
(''Since 0.11'')""")
_authors_pattern = re.compile(r'(-)?(?:"([^"]*)"|\'([^\']*)\'|([^\s]+))')
# INavigationContributor methods
def get_active_navigation_item(self, req):
return 'timeline'
def get_navigation_items(self, req):
if 'TIMELINE_VIEW' in req.perm:
yield ('mainnav', 'timeline',
tag.a(_("Timeline"), href=req.href.timeline(), accesskey=2))
# IPermissionRequestor methods
def get_permission_actions(self):
return ['TIMELINE_VIEW']
# IRequestHandler methods
def match_request(self, req):
return req.path_info == '/timeline'
def process_request(self, req):
req.perm.assert_permission('TIMELINE_VIEW')
format = req.args.get('format')
maxrows = int(req.args.get('max', 50 if format == 'rss' else 0))
lastvisit = int(req.session.get('timeline.lastvisit', '0'))
# indication of new events is unchanged when form is updated by user
revisit = any(a in req.args for a in ['update', 'from', 'daysback',
'author'])
if revisit:
lastvisit = int(req.session.get('timeline.nextlastvisit',
lastvisit))
# Parse the from date and adjust the timestamp to the last second of
# the day
fromdate = today = datetime.now(req.tz)
yesterday = to_datetime(today.replace(tzinfo=None) - timedelta(days=1),
req.tz)
precisedate = precision = None
if 'from' in req.args:
# Acquire from date only from non-blank input
reqfromdate = req.args['from'].strip()
if reqfromdate:
precisedate = user_time(req, parse_date, reqfromdate)
fromdate = precisedate.astimezone(req.tz)
precision = req.args.get('precision', '')
if precision.startswith('second'):
precision = timedelta(seconds=1)
elif precision.startswith('minute'):
precision = timedelta(minutes=1)
elif precision.startswith('hour'):
precision = timedelta(hours=1)
else:
precision = None
fromdate = to_datetime(datetime(fromdate.year, fromdate.month,
fromdate.day, 23, 59, 59, 999999),
req.tz)
daysback = as_int(req.args.get('daysback'),
90 if format == 'rss' else None)
if daysback is None:
daysback = as_int(req.session.get('timeline.daysback'), None)
if daysback is None:
daysback = self.default_daysback
daysback = max(0, daysback)
if self.max_daysback >= 0:
daysback = min(self.max_daysback, daysback)
authors = req.args.get('authors')
if authors is None and format != 'rss':
authors = req.session.get('timeline.authors')
authors = (authors or '').strip()
data = {'fromdate': fromdate, 'daysback': daysback,
'authors': authors,
'today': user_time(req, format_date, today),
'yesterday': user_time(req, format_date, yesterday),
'precisedate': precisedate, 'precision': precision,
'events': [], 'filters': [],
'abbreviated_messages': self.abbreviated_messages,
'lastvisit': lastvisit}
available_filters = []
for event_provider in self.event_providers:
available_filters += event_provider.get_timeline_filters(req) or []
# check the request or session for enabled filters, or use default
filters = [f[0] for f in available_filters if f[0] in req.args]
if not filters and format != 'rss':
filters = [f[0] for f in available_filters
if req.session.get('timeline.filter.' + f[0]) == '1']
if not filters:
filters = [f[0] for f in available_filters if len(f) == 2 or f[2]]
# save the results of submitting the timeline form to the session
if 'update' in req.args:
for filter in available_filters:
key = 'timeline.filter.%s' % filter[0]
if filter[0] in req.args:
req.session[key] = '1'
elif key in req.session:
del req.session[key]
stop = fromdate
start = to_datetime(stop.replace(tzinfo=None) - \
timedelta(days=daysback + 1),
req.tz)
# create author include and exclude sets
include = set()
exclude = set()
for match in self._authors_pattern.finditer(authors):
name = (match.group(2) or match.group(3) or match.group(4)).lower()
if match.group(1):
exclude.add(name)
else:
include.add(name)
# gather all events for the given period of time
events = []
for provider in self.event_providers:
try:
for event in provider.get_timeline_events(req, start, stop,
filters) or []:
# Check for 0.10 events
author = (event[2 if len(event) < 6 else 4] or '').lower()
if (not include or author in include) \
and not author in exclude:
events.append(self._event_data(provider, event))
except Exception, e: # cope with a failure of that provider
self._provider_failure(e, req, provider, filters,
[f[0] for f in available_filters])
# prepare sorted global list
events = sorted(events, key=lambda e: e['date'], reverse=True)
if maxrows:
events = events[:maxrows]
data['events'] = events
if format == 'rss':
data['email_map'] = Chrome(self.env).get_email_map()
rss_context = web_context(req, absurls=True)
rss_context.set_hints(wiki_flavor='html', shorten_lines=False)
data['context'] = rss_context
return 'timeline.rss', data, 'application/rss+xml'
else:
req.session.set('timeline.daysback', daysback,
self.default_daysback)
req.session.set('timeline.authors', authors, '')
# store lastvisit
if events and not revisit:
lastviewed = to_utimestamp(events[0]['date'])
req.session['timeline.lastvisit'] = max(lastvisit, lastviewed)
req.session['timeline.nextlastvisit'] = lastvisit
html_context = web_context(req)
html_context.set_hints(wiki_flavor='oneliner',
shorten_lines=self.abbreviated_messages)
data['context'] = html_context
add_stylesheet(req, 'common/css/timeline.css')
rss_href = req.href.timeline([(f, 'on') for f in filters],
daysback=90, max=50, authors=authors,
format='rss')
add_link(req, 'alternate', auth_link(req, rss_href), _('RSS Feed'),
'application/rss+xml', 'rss')
Chrome(self.env).add_jquery_ui(req)
for filter_ in available_filters:
data['filters'].append({'name': filter_[0], 'label': filter_[1],
'enabled': filter_[0] in filters})
# Navigation to the previous/next period of 'daysback' days
previous_start = fromdate.replace(tzinfo=None) - \
timedelta(days=daysback + 1)
previous_start = format_date(to_datetime(previous_start, req.tz),
format='%Y-%m-%d', tzinfo=req.tz)
add_link(req, 'prev', req.href.timeline(from_=previous_start,
authors=authors,
daysback=daysback),
_('Previous Period'))
if today - fromdate > timedelta(days=0):
next_start = fromdate.replace(tzinfo=None) + \
timedelta(days=daysback + 1)
next_start = format_date(to_datetime(next_start, req.tz),
format='%Y-%m-%d', tzinfo=req.tz)
add_link(req, 'next', req.href.timeline(from_=next_start,
authors=authors,
daysback=daysback),
_('Next Period'))
prevnext_nav(req, _('Previous Period'), _('Next Period'))
return 'timeline.html', data, None
# ITemplateProvider methods
def get_htdocs_dirs(self):
return []
def get_templates_dirs(self):
return [pkg_resources.resource_filename('trac.timeline', 'templates')]
# IRequestFilter methods
def pre_process_request(self, req, handler):
return handler
def post_process_request(self, req, template, data, content_type):
if data:
def pretty_dateinfo(date, format=None, dateonly=False):
absolute = user_time(req, format_datetime, date)
relative = pretty_timedelta(date)
if not format:
format = req.session.get('dateinfo',
Chrome(self.env).default_dateinfo_format)
if format == 'absolute':
if dateonly:
label = absolute
elif req.lc_time == 'iso8601':
label = _("at %(iso8601)s", iso8601=absolute)
else:
label = _("on %(date)s at %(time)s",
date=user_time(req, format_date, date),
time=user_time(req, format_time, date))
title = _("See timeline %(relativetime)s ago",
relativetime=relative)
else:
label = _("%(relativetime)s ago", relativetime=relative) \
if not dateonly else relative
title = _("See timeline at %(absolutetime)s",
absolutetime=absolute)
return self.get_timeline_link(req, date, label,
precision='second', title=title)
def dateinfo(date):
return pretty_dateinfo(date, format='relative', dateonly=True)
data['pretty_dateinfo'] = pretty_dateinfo
data['dateinfo'] = dateinfo
return template, data, content_type
# IWikiSyntaxProvider methods
def get_wiki_syntax(self):
return []
def get_link_resolvers(self):
def link_resolver(formatter, ns, target, label):
path, query, fragment = split_url_into_path_query_fragment(target)
precision = None
time = path.split("T", 1)
if len(time) > 1:
time = time[1].split("Z")[0]
if len(time) >= 6:
precision = 'seconds'
elif len(time) >= 4:
precision = 'minutes'
elif len(time) >= 2:
precision = 'hours'
try:
return self.get_timeline_link(formatter.req,
parse_date(path, utc),
label, precision, query, fragment)
except TracError, e:
return tag.a(label, title=to_unicode(e.message),
class_='timeline missing')
yield ('timeline', link_resolver)
# Public methods
def get_timeline_link(self, req, date, label=None, precision='hours',
query=None, fragment=None, title=None):
iso_date = format_datetime(date, 'iso8601', req.tz)
href = req.href.timeline(from_=iso_date, precision=precision)
return tag.a(label or iso_date, class_='timeline',
title=title or _("See timeline at %(absolutetime)s",
absolutetime=iso_date),
href=concat_path_query_fragment(href, query, fragment))
# Internal methods
def _event_data(self, provider, event):
    """Normalize a timeline event tuple into the dict used by templates.

    Handles the legacy 0.10 6-tuple format as well as the 0.11
    4/5-tuple formats; the 5-tuple carries its own provider, which
    then overrides the `provider` argument.
    """
    if len(event) == 6:  # 0.10 events
        kind, url, title, date, author, markup = event
        data = {'url': url, 'title': title, 'description': markup}

        def render(field, context):
            # Legacy events are pre-rendered; just look the field up.
            return data.get(field)
    else:  # 0.11 events
        if len(event) == 5:  # with special provider
            kind, date, author, data, provider = event
        else:
            kind, date, author, data = event

        def render(field, context):
            # Defer rendering to the (possibly overridden) provider.
            return provider.render_timeline_event(context, field, event)
    if not isinstance(date, datetime):
        # Plain timestamps are promoted to tz-aware datetimes (UTC).
        date = datetime.fromtimestamp(date, utc)
    return {'kind': kind, 'author': author, 'date': date,
            'dateuid': to_utimestamp(date), 'render': render,
            'event': event, 'data': data, 'provider': provider}
def _provider_failure(self, exc, req, ep, current_filters, all_filters):
    """Raise a TracError exception explaining the failure of a provider.

    At the same time, the message will contain a link to the timeline
    without the filters corresponding to the guilty event provider `ep`.
    """
    self.log.error('Timeline event provider failed: %s',
                   exception_to_unicode(exc, traceback=True))
    # Map filter name -> display label for the failing provider.
    ep_kinds = {}
    for f in ep.get_timeline_filters(req) or []:
        ep_kinds[f[0]] = f[1]
    ep_filters = set(ep_kinds)
    active = set(current_filters)
    # Propose a timeline view with the provider's filters switched off;
    # if none of the active filters would remain, fall back to every
    # other known filter instead.
    remaining = active - ep_filters
    if not remaining:
        remaining = set(all_filters) - ep_filters
    args = [(a, req.args.get(a))
            for a in ('from', 'format', 'max', 'daysback')]
    href = req.href.timeline(args + [(f, 'on') for f in remaining])
    # TRANSLATOR: ...want to see the 'other kinds of events' from... (link)
    other_events = tag.a(_('other kinds of events'), href=href)
    failed_kinds = ', '.join('"%s"' % ep_kinds[f]
                             for f in active & ep_filters)
    raise TracError(tag(
        tag.p(tag_("Event provider %(name)s failed for filters "
                   "%(kinds)s: ",
                   name=tag.tt(ep.__class__.__name__),
                   kinds=failed_kinds),
              tag.b(exception_to_unicode(exc)), class_='message'),
        tag.p(tag_("You may want to see the %(other_events)s from the "
                   "Timeline or notify your Trac administrator about the "
                   "error (detailed information was written to the log).",
                   other_events=other_events))))
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
apache/bloodhound | https://github.com/apache/bloodhound/blob/c3e31294e68af99d4e040e64fbdf52394344df9e/trac/trac/timeline/__init__.py | trac/trac/timeline/__init__.py | from trac.timeline.api import *
| python | Apache-2.0 | c3e31294e68af99d4e040e64fbdf52394344df9e | 2026-01-05T07:12:43.622011Z | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.