sample_id stringlengths 21 196 | text stringlengths 105 936k | metadata dict | category stringclasses 6
values |
|---|---|---|---|
zulip/zulip:zerver/tests/test_default_channels.py | import orjson
from zerver.actions.default_streams import (
do_add_default_stream,
do_add_streams_to_default_stream_group,
do_change_default_stream_group_description,
do_change_default_stream_group_name,
do_create_default_stream_group,
do_remove_default_stream,
do_remove_default_stream_group,
do_remove_streams_from_default_stream_group,
lookup_default_stream_groups,
)
from zerver.actions.streams import do_change_stream_group_based_setting
from zerver.actions.user_groups import check_add_user_group
from zerver.lib.default_streams import (
get_default_stream_ids_for_realm,
get_slim_realm_default_streams,
)
from zerver.lib.exceptions import JsonableError
from zerver.lib.streams import ensure_stream
from zerver.lib.test_classes import ZulipTestCase
from zerver.lib.test_helpers import queries_captured
from zerver.lib.user_groups import is_user_in_group
from zerver.models import DefaultStream, DefaultStreamGroup, Realm, Stream, UserProfile
from zerver.models.realms import get_realm
from zerver.models.streams import get_default_stream_groups
class DefaultStreamTest(ZulipTestCase):
def get_default_stream_names(self, realm: Realm) -> set[str]:
streams = get_slim_realm_default_streams(realm.id)
return {s.name for s in streams}
def test_query_count(self) -> None:
DefaultStream.objects.all().delete()
realm = get_realm("zulip")
new_stream_ids = set()
for i in range(5):
stream = ensure_stream(realm, f"stream {i}", acting_user=None)
new_stream_ids.add(stream.id)
do_add_default_stream(stream)
with queries_captured() as queries:
default_stream_ids = get_default_stream_ids_for_realm(realm.id)
self.assert_length(queries, 1)
self.assertEqual(default_stream_ids, new_stream_ids)
def test_add_and_remove_default_stream(self) -> None:
realm = get_realm("zulip")
stream = ensure_stream(realm, "Added stream", acting_user=None)
orig_stream_names = self.get_default_stream_names(realm)
do_add_default_stream(stream)
new_stream_names = self.get_default_stream_names(realm)
added_stream_names = new_stream_names - orig_stream_names
self.assertEqual(added_stream_names, {"Added stream"})
# idempotency--2nd call to add_default_stream should be a noop
do_add_default_stream(stream)
self.assertEqual(self.get_default_stream_names(realm), new_stream_names)
# start removing
do_remove_default_stream(stream)
self.assertEqual(self.get_default_stream_names(realm), orig_stream_names)
# idempotency--2nd call to remove_default_stream should be a noop
do_remove_default_stream(stream)
self.assertEqual(self.get_default_stream_names(realm), orig_stream_names)
def test_api_calls(self) -> None:
user_profile = self.example_user("hamlet")
self.login_user(user_profile)
DefaultStream.objects.filter(realm=user_profile.realm).delete()
stream_name = "stream ADDED via api"
stream = ensure_stream(user_profile.realm, stream_name, acting_user=None)
result = self.client_post("/json/default_streams", dict(stream_id=stream.id))
self.assert_json_error(result, "Must be an organization administrator")
self.assertFalse(stream_name in self.get_default_stream_names(user_profile.realm))
self.set_user_role(user_profile, UserProfile.ROLE_REALM_ADMINISTRATOR)
result = self.client_post("/json/default_streams", dict(stream_id=stream.id))
self.assert_json_success(result)
self.assertTrue(stream_name in self.get_default_stream_names(user_profile.realm))
# look for it
self.subscribe(user_profile, stream_name)
payload = dict(
include_public="true",
include_default="true",
)
result = self.client_get("/json/streams", payload)
streams = self.assert_json_success(result)["streams"]
default_streams = {stream["name"] for stream in streams if stream["is_default"]}
self.assertEqual(default_streams, {stream_name})
other_streams = {stream["name"] for stream in streams if not stream["is_default"]}
self.assertGreater(len(other_streams), 0)
# and remove it
result = self.client_delete("/json/default_streams", dict(stream_id=stream.id))
self.assert_json_success(result)
self.assertFalse(stream_name in self.get_default_stream_names(user_profile.realm))
# Test admin can't access unsubscribed private stream for adding.
stream_name = "private_stream"
stream = self.make_stream(stream_name, invite_only=True)
self.subscribe(self.example_user("iago"), stream_name)
result = self.client_post("/json/default_streams", dict(stream_id=stream.id))
self.assert_json_error(result, "Invalid channel ID")
# Test admin can't add subscribed private stream also.
self.subscribe(user_profile, stream_name)
result = self.client_post("/json/default_streams", dict(stream_id=stream.id))
self.assert_json_error(result, "Private channels cannot be made default.")
def test_add_and_remove_stream_as_default(self) -> None:
user_profile = self.example_user("hamlet")
self.login_user(user_profile)
realm = user_profile.realm
stream = self.make_stream("stream", realm=realm)
stream_id = self.subscribe(user_profile, "stream").id
params = {
"is_default_stream": orjson.dumps(True).decode(),
}
self.assertFalse(is_user_in_group(stream.can_administer_channel_group_id, user_profile))
result = self.client_patch(f"/json/streams/{stream_id}", params)
self.assert_json_error(result, "You do not have permission to administer this channel.")
self.assertFalse(stream_id in get_default_stream_ids_for_realm(realm.id))
# User still needs to be an admin to add a default channel.
self.set_user_role(user_profile, UserProfile.ROLE_MEMBER)
user_profile_group = check_add_user_group(
realm, "user_profile_group", [user_profile], acting_user=user_profile
)
do_change_stream_group_based_setting(
stream,
"can_administer_channel_group",
user_profile_group,
acting_user=user_profile,
)
result = self.client_patch(f"/json/streams/{stream_id}", params)
self.assert_json_error(result, "You do not have permission to change default channels.")
self.assertFalse(stream_id in get_default_stream_ids_for_realm(realm.id))
self.set_user_role(user_profile, UserProfile.ROLE_REALM_ADMINISTRATOR)
result = self.client_patch(f"/json/streams/{stream_id}", params)
self.assert_json_success(result)
self.assertTrue(stream_id in get_default_stream_ids_for_realm(realm.id))
params = {
"is_private": orjson.dumps(True).decode(),
}
result = self.client_patch(f"/json/streams/{stream_id}", params)
self.assert_json_error(result, "A default channel cannot be private.")
stream.refresh_from_db()
self.assertFalse(stream.invite_only)
params = {
"is_private": orjson.dumps(True).decode(),
"is_default_stream": orjson.dumps(False).decode(),
}
# User still needs to be an admin to remove a default channel.
self.set_user_role(user_profile, UserProfile.ROLE_MEMBER)
self.assertTrue(is_user_in_group(stream.can_administer_channel_group_id, user_profile))
self.assertTrue(stream_id in get_default_stream_ids_for_realm(realm.id))
result = self.client_patch(f"/json/streams/{stream_id}", params)
self.assert_json_error(result, "You do not have permission to change default channels.")
self.assertTrue(stream_id in get_default_stream_ids_for_realm(realm.id))
self.set_user_role(user_profile, UserProfile.ROLE_REALM_ADMINISTRATOR)
result = self.client_patch(f"/json/streams/{stream_id}", params)
self.assert_json_success(result)
stream.refresh_from_db()
self.assertTrue(stream.invite_only)
self.assertFalse(stream_id in get_default_stream_ids_for_realm(realm.id))
stream_2 = self.make_stream("stream_2", realm=realm)
stream_2_id = self.subscribe(user_profile, "stream_2").id
bad_params = {
"is_default_stream": orjson.dumps(True).decode(),
"is_private": orjson.dumps(True).decode(),
}
result = self.client_patch(f"/json/streams/{stream_2_id}", bad_params)
self.assert_json_error(result, "A default channel cannot be private.")
stream.refresh_from_db()
self.assertFalse(stream_2.invite_only)
self.assertFalse(stream_2_id in get_default_stream_ids_for_realm(realm.id))
private_stream = self.make_stream("private_stream", realm=realm, invite_only=True)
private_stream_id = self.subscribe(user_profile, "private_stream").id
params = {
"is_default_stream": orjson.dumps(True).decode(),
}
result = self.client_patch(f"/json/streams/{private_stream_id}", params)
self.assert_json_error(result, "A default channel cannot be private.")
self.assertFalse(private_stream_id in get_default_stream_ids_for_realm(realm.id))
params = {
"is_private": orjson.dumps(False).decode(),
"is_default_stream": orjson.dumps(True).decode(),
}
result = self.client_patch(f"/json/streams/{private_stream_id}", params)
self.assert_json_success(result)
private_stream.refresh_from_db()
self.assertFalse(private_stream.invite_only)
self.assertTrue(private_stream_id in get_default_stream_ids_for_realm(realm.id))
class DefaultStreamGroupTest(ZulipTestCase):
def test_create_update_and_remove_default_stream_group(self) -> None:
realm = get_realm("zulip")
# Test creating new default stream group
default_stream_groups = get_default_stream_groups(realm)
self.assert_length(default_stream_groups, 0)
streams = [
ensure_stream(realm, stream_name, acting_user=None)
for stream_name in ["stream1", "stream2", "stream3"]
]
def get_streams(group: DefaultStreamGroup) -> list[Stream]:
return list(group.streams.all().order_by("name"))
group_name = "group1"
description = "This is group1"
do_create_default_stream_group(realm, group_name, description, streams)
default_stream_groups = get_default_stream_groups(realm)
self.assert_length(default_stream_groups, 1)
self.assertEqual(default_stream_groups[0].name, group_name)
self.assertEqual(default_stream_groups[0].description, description)
self.assertEqual(get_streams(default_stream_groups[0]), streams)
# Test adding streams to existing default stream group
group = lookup_default_stream_groups(["group1"], realm)[0]
new_stream_names = [
"stream4",
"stream5",
"stream6",
"stream7",
"stream8",
"stream9",
]
new_streams = [
ensure_stream(realm, new_stream_name, acting_user=None)
for new_stream_name in new_stream_names
]
streams += new_streams
do_add_streams_to_default_stream_group(realm, group, new_streams)
default_stream_groups = get_default_stream_groups(realm)
self.assert_length(default_stream_groups, 1)
self.assertEqual(default_stream_groups[0].name, group_name)
self.assertEqual(get_streams(default_stream_groups[0]), streams)
# Test removing streams from existing default stream group
with self.assert_database_query_count(5):
do_remove_streams_from_default_stream_group(realm, group, new_streams)
remaining_streams = streams[0:3]
default_stream_groups = get_default_stream_groups(realm)
self.assert_length(default_stream_groups, 1)
self.assertEqual(default_stream_groups[0].name, group_name)
self.assertEqual(get_streams(default_stream_groups[0]), remaining_streams)
# Test changing default stream group description
new_description = "group1 new description"
do_change_default_stream_group_description(realm, group, new_description)
default_stream_groups = get_default_stream_groups(realm)
self.assertEqual(default_stream_groups[0].description, new_description)
self.assert_length(default_stream_groups, 1)
# Test changing default stream group name
new_group_name = "new group1"
do_change_default_stream_group_name(realm, group, new_group_name)
default_stream_groups = get_default_stream_groups(realm)
self.assert_length(default_stream_groups, 1)
self.assertEqual(default_stream_groups[0].name, new_group_name)
self.assertEqual(get_streams(default_stream_groups[0]), remaining_streams)
# Test removing default stream group
do_remove_default_stream_group(realm, group)
default_stream_groups = get_default_stream_groups(realm)
self.assert_length(default_stream_groups, 0)
# Test creating a default stream group which contains a default stream
do_add_default_stream(remaining_streams[0])
with self.assertRaisesRegex(
JsonableError, "'stream1' is a default channel and cannot be added to 'new group1'"
):
do_create_default_stream_group(
realm, new_group_name, "This is group1", remaining_streams
)
def test_api_calls(self) -> None:
self.login("hamlet")
user_profile = self.example_user("hamlet")
realm = user_profile.realm
self.set_user_role(user_profile, UserProfile.ROLE_REALM_ADMINISTRATOR)
# Test creating new default stream group
stream_names = ["stream1", "stream2", "stream3"]
group_name = "group1"
description = "This is group1"
default_stream_groups = get_default_stream_groups(realm)
self.assert_length(default_stream_groups, 0)
streams = [
ensure_stream(realm, stream_name, acting_user=None) for stream_name in stream_names
]
result = self.client_post(
"/json/default_stream_groups/create",
{
"group_name": group_name,
"description": description,
"stream_names": orjson.dumps(stream_names).decode(),
},
)
self.assert_json_success(result)
default_stream_groups = get_default_stream_groups(realm)
self.assert_length(default_stream_groups, 1)
self.assertEqual(default_stream_groups[0].name, group_name)
self.assertEqual(default_stream_groups[0].description, description)
self.assertEqual(list(default_stream_groups[0].streams.all().order_by("id")), streams)
# Try adding the same streams to the group.
result = self.client_post(
"/json/default_stream_groups/create",
{
"group_name": group_name,
"description": description,
"stream_names": orjson.dumps(stream_names).decode(),
},
)
self.assert_json_error(result, "Default channel group 'group1' already exists")
# Test adding streams to existing default stream group
group_id = default_stream_groups[0].id
new_stream_names = ["stream4", "stream5"]
new_streams = [
ensure_stream(realm, new_stream_name, acting_user=None)
for new_stream_name in new_stream_names
]
streams += new_streams
result = self.client_patch(
f"/json/default_stream_groups/{group_id}/streams",
{"stream_names": orjson.dumps(new_stream_names).decode()},
)
self.assert_json_error(result, "Missing 'op' argument")
result = self.client_patch(
f"/json/default_stream_groups/{group_id}/streams",
{"op": "invalid", "stream_names": orjson.dumps(new_stream_names).decode()},
)
self.assert_json_error(result, 'Invalid value for "op". Specify one of "add" or "remove".')
result = self.client_patch(
"/json/default_stream_groups/12345/streams",
{"op": "add", "stream_names": orjson.dumps(new_stream_names).decode()},
)
self.assert_json_error(result, "Default channel group with id '12345' does not exist.")
result = self.client_patch(f"/json/default_stream_groups/{group_id}/streams", {"op": "add"})
self.assert_json_error(result, "Missing 'stream_names' argument")
do_add_default_stream(new_streams[0])
result = self.client_patch(
f"/json/default_stream_groups/{group_id}/streams",
{"op": "add", "stream_names": orjson.dumps(new_stream_names).decode()},
)
self.assert_json_error(
result, "'stream4' is a default channel and cannot be added to 'group1'"
)
do_remove_default_stream(new_streams[0])
result = self.client_patch(
f"/json/default_stream_groups/{group_id}/streams",
{"op": "add", "stream_names": orjson.dumps(new_stream_names).decode()},
)
self.assert_json_success(result)
default_stream_groups = get_default_stream_groups(realm)
self.assert_length(default_stream_groups, 1)
self.assertEqual(default_stream_groups[0].name, group_name)
self.assertEqual(list(default_stream_groups[0].streams.all().order_by("name")), streams)
result = self.client_patch(
f"/json/default_stream_groups/{group_id}/streams",
{"op": "add", "stream_names": orjson.dumps(new_stream_names).decode()},
)
self.assert_json_error(
result, "Channel 'stream4' is already present in default channel group 'group1'"
)
# Test removing streams from default stream group
result = self.client_patch(
"/json/default_stream_groups/12345/streams",
{"op": "remove", "stream_names": orjson.dumps(new_stream_names).decode()},
)
self.assert_json_error(result, "Default channel group with id '12345' does not exist.")
result = self.client_patch(
f"/json/default_stream_groups/{group_id}/streams",
{"op": "remove", "stream_names": orjson.dumps(["random stream name"]).decode()},
)
self.assert_json_error(result, "Invalid channel name 'random stream name'")
streams.remove(new_streams[0])
result = self.client_patch(
f"/json/default_stream_groups/{group_id}/streams",
{"op": "remove", "stream_names": orjson.dumps([new_stream_names[0]]).decode()},
)
self.assert_json_success(result)
default_stream_groups = get_default_stream_groups(realm)
self.assert_length(default_stream_groups, 1)
self.assertEqual(default_stream_groups[0].name, group_name)
self.assertEqual(list(default_stream_groups[0].streams.all().order_by("name")), streams)
result = self.client_patch(
f"/json/default_stream_groups/{group_id}/streams",
{"op": "remove", "stream_names": orjson.dumps(new_stream_names).decode()},
)
self.assert_json_error(
result, "Channel 'stream4' is not present in default channel group 'group1'"
)
# Test changing description of default stream group
new_description = "new group1 description"
result = self.client_patch(f"/json/default_stream_groups/{group_id}")
self.assert_json_error(result, 'You must pass "new_description" or "new_group_name".')
result = self.client_patch(
"/json/default_stream_groups/12345",
{"new_description": new_description},
)
self.assert_json_error(result, "Default channel group with id '12345' does not exist.")
result = self.client_patch(
f"/json/default_stream_groups/{group_id}",
{"new_description": new_description},
)
self.assert_json_success(result)
default_stream_groups = get_default_stream_groups(realm)
self.assert_length(default_stream_groups, 1)
self.assertEqual(default_stream_groups[0].name, group_name)
self.assertEqual(default_stream_groups[0].description, new_description)
# Test changing name of default stream group
new_group_name = "new group1"
do_create_default_stream_group(realm, "group2", "", [])
result = self.client_patch(
f"/json/default_stream_groups/{group_id}",
{"new_group_name": "group2"},
)
self.assert_json_error(result, "Default channel group 'group2' already exists")
new_group = lookup_default_stream_groups(["group2"], realm)[0]
do_remove_default_stream_group(realm, new_group)
result = self.client_patch(
f"/json/default_stream_groups/{group_id}",
{"new_group_name": group_name},
)
self.assert_json_error(result, "This default channel group is already named 'group1'")
result = self.client_patch(
f"/json/default_stream_groups/{group_id}",
{"new_group_name": new_group_name},
)
self.assert_json_success(result)
default_stream_groups = get_default_stream_groups(realm)
self.assert_length(default_stream_groups, 1)
self.assertEqual(default_stream_groups[0].name, new_group_name)
self.assertEqual(default_stream_groups[0].description, new_description)
# Test deleting a default stream group
result = self.client_delete(f"/json/default_stream_groups/{group_id}")
self.assert_json_success(result)
default_stream_groups = get_default_stream_groups(realm)
self.assert_length(default_stream_groups, 0)
result = self.client_delete(f"/json/default_stream_groups/{group_id}")
self.assert_json_error(
result, f"Default channel group with id '{group_id}' does not exist."
)
def test_invalid_default_stream_group_name(self) -> None:
self.login("iago")
user_profile = self.example_user("iago")
realm = user_profile.realm
stream_names = ["stream1", "stream2", "stream3"]
description = "This is group1"
for stream_name in stream_names:
ensure_stream(realm, stream_name, acting_user=None)
result = self.client_post(
"/json/default_stream_groups/create",
{
"group_name": "",
"description": description,
"stream_names": orjson.dumps(stream_names).decode(),
},
)
self.assert_json_error(result, "Invalid default channel group name ''")
result = self.client_post(
"/json/default_stream_groups/create",
{
"group_name": "x" * 100,
"description": description,
"stream_names": orjson.dumps(stream_names).decode(),
},
)
self.assert_json_error(
result,
f"Default channel group name too long (limit: {DefaultStreamGroup.MAX_NAME_LENGTH} characters)",
)
result = self.client_post(
"/json/default_stream_groups/create",
{
"group_name": "abc\000",
"description": description,
"stream_names": orjson.dumps(stream_names).decode(),
},
)
self.assert_json_error(
result, "Default channel group name 'abc\000' contains NULL (0x00) characters."
)
# Also test that lookup_default_stream_groups raises an
# error if we pass it a bad name. This function is used
# during registration, but it's a bit heavy to do a full
# test of that.
with self.assertRaisesRegex(JsonableError, "Invalid default channel group invalid-name"):
lookup_default_stream_groups(["invalid-name"], realm)
| {
"repo_id": "zulip/zulip",
"file_path": "zerver/tests/test_default_channels.py",
"license": "Apache License 2.0",
"lines": 467,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
zulip/zulip:zerver/tests/test_channel_access.py | from zerver.actions.streams import do_change_stream_group_based_setting, do_deactivate_stream
from zerver.actions.user_groups import check_add_user_group
from zerver.actions.users import do_change_user_role
from zerver.lib.exceptions import JsonableError
from zerver.lib.streams import (
access_stream_by_id,
access_stream_by_name,
bulk_can_access_stream_metadata_user_ids,
can_access_stream_history,
can_access_stream_metadata_user_ids,
ensure_stream,
user_has_content_access,
)
from zerver.lib.test_classes import ZulipTestCase
from zerver.lib.types import UserGroupMembersData
from zerver.lib.user_groups import UserGroupMembershipDetails
from zerver.models import NamedUserGroup, Stream, UserProfile
from zerver.models.realms import get_realm
from zerver.models.streams import get_stream
from zerver.models.users import active_non_guest_user_ids, active_user_ids
class AccessStreamTest(ZulipTestCase):
def test_access_stream(self) -> None:
"""
A comprehensive security test for the access_stream_by_* API functions.
"""
# Create a private stream for which Hamlet is the only subscriber.
hamlet = self.example_user("hamlet")
stream_name = "new_private_stream"
self.login_user(hamlet)
self.subscribe_via_post(hamlet, [stream_name], invite_only=True)
stream = get_stream(stream_name, hamlet.realm)
othello = self.example_user("othello")
# Nobody can access a stream that doesn't exist
with self.assertRaisesRegex(JsonableError, "Invalid channel ID"):
access_stream_by_id(hamlet, 501232)
with self.assertRaisesRegex(JsonableError, "Invalid channel name 'invalid stream'"):
access_stream_by_name(hamlet, "invalid stream")
# Hamlet can access the private stream
(stream_ret, sub_ret) = access_stream_by_id(hamlet, stream.id)
self.assertEqual(stream.id, stream_ret.id)
assert sub_ret is not None
self.assertEqual(sub_ret.recipient.type_id, stream.id)
(stream_ret2, sub_ret2) = access_stream_by_name(hamlet, stream.name)
self.assertEqual(stream_ret.id, stream_ret2.id)
self.assertEqual(sub_ret, sub_ret2)
# Othello cannot access the private stream
with self.assertRaisesRegex(JsonableError, "Invalid channel ID"):
access_stream_by_id(othello, stream.id)
with self.assertRaisesRegex(JsonableError, "Invalid channel name 'new_private_stream'"):
access_stream_by_name(othello, stream.name)
# Both Othello and Hamlet can access a public stream that only
# Hamlet is subscribed to in this realm
public_stream_name = "public_stream"
self.subscribe_via_post(hamlet, [public_stream_name], invite_only=False)
public_stream = get_stream(public_stream_name, hamlet.realm)
access_stream_by_id(othello, public_stream.id)
access_stream_by_name(othello, public_stream.name)
access_stream_by_id(hamlet, public_stream.id)
access_stream_by_name(hamlet, public_stream.name)
# Archive channel to verify require_active_channel code path
do_deactivate_stream(public_stream, acting_user=hamlet)
with self.assertRaisesRegex(JsonableError, "Invalid channel ID"):
access_stream_by_id(hamlet, public_stream.id, require_active_channel=True)
access_stream_by_id(hamlet, public_stream.id, require_active_channel=False)
# Nobody can access a public stream in another realm
mit_realm = get_realm("zephyr")
mit_stream = ensure_stream(mit_realm, "mit_stream", invite_only=False, acting_user=None)
sipbtest = self.mit_user("sipbtest")
with self.assertRaisesRegex(JsonableError, "Invalid channel ID"):
access_stream_by_id(hamlet, mit_stream.id)
with self.assertRaisesRegex(JsonableError, "Invalid channel name 'mit_stream'"):
access_stream_by_name(hamlet, mit_stream.name)
with self.assertRaisesRegex(JsonableError, "Invalid channel ID"):
access_stream_by_id(sipbtest, stream.id)
with self.assertRaisesRegex(JsonableError, "Invalid channel name 'new_private_stream'"):
access_stream_by_name(sipbtest, stream.name)
def test_access_stream_allow_metadata_access_flag(self) -> None:
"""
A comprehensive security test for the access_stream_by_* API functions.
"""
# Create a private stream for which Hamlet is the only subscriber.
hamlet = self.example_user("hamlet")
stream_name = "new_private_stream"
self.login_user(hamlet)
self.subscribe_via_post(hamlet, [stream_name], invite_only=True)
stream = get_stream(stream_name, hamlet.realm)
othello = self.example_user("othello")
iago = self.example_user("iago")
polonius = self.example_user("polonius")
# Realm admin cannot access the private stream
with self.assertRaisesRegex(JsonableError, "Invalid channel ID"):
access_stream_by_id(iago, stream.id)
with self.assertRaisesRegex(JsonableError, "Invalid channel name 'new_private_stream'"):
access_stream_by_name(iago, stream.name)
# Realm admins can access private stream if
# require_content_access set to False
access_stream_by_id(iago, stream.id, require_content_access=False)
access_stream_by_name(iago, stream.name, require_content_access=False)
# Normal unsubscribed user cannot access a private stream
with self.assertRaisesRegex(JsonableError, "Invalid channel ID"):
access_stream_by_id(othello, stream.id)
with self.assertRaisesRegex(JsonableError, "Invalid channel name 'new_private_stream'"):
access_stream_by_name(othello, stream.name)
# Normal unsubscribed user cannot access a private stream with
# require_content_access set to False
with self.assertRaisesRegex(JsonableError, "Invalid channel ID"):
access_stream_by_id(othello, stream.id, require_content_access=False)
with self.assertRaisesRegex(JsonableError, "Invalid channel name 'new_private_stream'"):
access_stream_by_name(othello, stream.name, require_content_access=False)
polonius_and_othello_group = check_add_user_group(
othello.realm, "user_profile_group", [othello, polonius], acting_user=othello
)
nobody_group = NamedUserGroup.objects.get(
name="role:nobody", is_system_group=True, realm_for_sharding=othello.realm
)
do_change_stream_group_based_setting(
stream,
"can_administer_channel_group",
polonius_and_othello_group,
acting_user=othello,
)
# Channel admins can access private stream if
# require_content_access is set to False
access_stream_by_id(othello, stream.id, require_content_access=False)
access_stream_by_name(othello, stream.name, require_content_access=False)
# Guest user who is a channel admin cannot access a stream via
# groups if they are not subscribed to it.
with self.assertRaisesRegex(JsonableError, "Invalid channel ID"):
access_stream_by_id(polonius, stream.id, require_content_access=False)
with self.assertRaisesRegex(JsonableError, "Invalid channel name 'new_private_stream'"):
access_stream_by_name(polonius, stream.name, require_content_access=False)
do_change_stream_group_based_setting(
stream,
"can_administer_channel_group",
nobody_group,
acting_user=othello,
)
do_change_stream_group_based_setting(
stream,
"can_add_subscribers_group",
polonius_and_othello_group,
acting_user=othello,
)
access_stream_by_id(othello, stream.id, require_content_access=False)
access_stream_by_name(othello, stream.name, require_content_access=False)
# Users in `can_add_subscribers_group` can access private
# stream if require_content_access is set to True
access_stream_by_id(othello, stream.id, require_content_access=True)
access_stream_by_name(othello, stream.name, require_content_access=True)
# Guest user who cannot access a stream via groups if they are
# part of `can_add_subscribers_group` but not subscribed to it.
with self.assertRaisesRegex(JsonableError, "Invalid channel ID"):
access_stream_by_id(polonius, stream.id, require_content_access=False)
with self.assertRaisesRegex(JsonableError, "Invalid channel name 'new_private_stream'"):
access_stream_by_name(polonius, stream.name, require_content_access=False)
with self.assertRaisesRegex(JsonableError, "Invalid channel ID"):
access_stream_by_id(polonius, stream.id, require_content_access=True)
with self.assertRaisesRegex(JsonableError, "Invalid channel name 'new_private_stream'"):
access_stream_by_name(polonius, stream.name, require_content_access=True)
do_change_stream_group_based_setting(
stream,
"can_add_subscribers_group",
nobody_group,
acting_user=othello,
)
do_change_stream_group_based_setting(
stream,
"can_subscribe_group",
polonius_and_othello_group,
acting_user=othello,
)
access_stream_by_id(othello, stream.id, require_content_access=False)
access_stream_by_name(othello, stream.name, require_content_access=False)
# Users in `can_subscribe_group` can access private
# stream if require_content_access is set to True
access_stream_by_id(othello, stream.id, require_content_access=True)
access_stream_by_name(othello, stream.name, require_content_access=True)
# Guest user who cannot access a stream via groups if they are
# part of `can_subscribe_group` but not subscribed to it.
with self.assertRaisesRegex(JsonableError, "Invalid channel ID"):
access_stream_by_id(polonius, stream.id, require_content_access=False)
with self.assertRaisesRegex(JsonableError, "Invalid channel name 'new_private_stream'"):
access_stream_by_name(polonius, stream.name, require_content_access=False)
with self.assertRaisesRegex(JsonableError, "Invalid channel ID"):
access_stream_by_id(polonius, stream.id, require_content_access=True)
with self.assertRaisesRegex(JsonableError, "Invalid channel name 'new_private_stream'"):
access_stream_by_name(polonius, stream.name, require_content_access=True)
def test_stream_access_by_guest(self) -> None:
guest_user_profile = self.example_user("polonius")
self.login_user(guest_user_profile)
stream_name = "public_stream_1"
stream = self.make_stream(stream_name, guest_user_profile.realm, invite_only=False)
# Guest user don't have access to unsubscribed public streams
with self.assertRaisesRegex(JsonableError, "Invalid channel ID"):
access_stream_by_id(guest_user_profile, stream.id)
# Guest user have access to subscribed public streams
self.subscribe(guest_user_profile, stream_name)
(stream_ret, sub_ret) = access_stream_by_id(guest_user_profile, stream.id)
assert sub_ret is not None
self.assertEqual(stream.id, stream_ret.id)
self.assertEqual(sub_ret.recipient.type_id, stream.id)
stream_name = "private_stream_1"
stream = self.make_stream(stream_name, guest_user_profile.realm, invite_only=True)
# Obviously, a guest user doesn't have access to unsubscribed private streams either
with self.assertRaisesRegex(JsonableError, "Invalid channel ID"):
access_stream_by_id(guest_user_profile, stream.id)
# Guest user have access to subscribed private streams
self.subscribe(guest_user_profile, stream_name)
(stream_ret, sub_ret) = access_stream_by_id(guest_user_profile, stream.id)
assert sub_ret is not None
self.assertEqual(stream.id, stream_ret.id)
self.assertEqual(sub_ret.recipient.type_id, stream.id)
stream_name = "web_public_stream"
stream = self.make_stream(stream_name, guest_user_profile.realm, is_web_public=True)
# Guest users have access to web-public streams even if they aren't subscribed.
(stream_ret, sub_ret) = access_stream_by_id(guest_user_profile, stream.id)
self.assertTrue(can_access_stream_history(guest_user_profile, stream))
assert sub_ret is None
self.assertEqual(stream.id, stream_ret.id)
def test_has_content_access(self) -> None:
guest_user = self.example_user("polonius")
aaron = self.example_user("aaron")
realm = guest_user.realm
web_public_stream = self.make_stream("web_public_stream", realm=realm, is_web_public=True)
private_stream = self.make_stream("private_stream", realm=realm, invite_only=True)
public_stream = self.make_stream("public_stream", realm=realm, invite_only=False)
# Even guest user should have access to web public channel.
self.assertEqual(
user_has_content_access(
guest_user,
web_public_stream,
user_group_membership_details=UserGroupMembershipDetails(
user_recursive_group_ids=None
),
is_subscribed=False,
),
True,
)
# User should have access to private channel if they are
# subscribed to it
self.assertEqual(
user_has_content_access(
aaron,
private_stream,
user_group_membership_details=UserGroupMembershipDetails(
user_recursive_group_ids=None
),
is_subscribed=True,
),
True,
)
self.assertEqual(
user_has_content_access(
aaron,
private_stream,
user_group_membership_details=UserGroupMembershipDetails(
user_recursive_group_ids=None
),
is_subscribed=False,
),
False,
)
# Non guest user should have access to public channel
# regardless of their subscription to the channel.
self.assertEqual(
user_has_content_access(
aaron,
public_stream,
user_group_membership_details=UserGroupMembershipDetails(
user_recursive_group_ids=None
),
is_subscribed=True,
),
True,
)
self.assertEqual(
user_has_content_access(
aaron,
public_stream,
user_group_membership_details=UserGroupMembershipDetails(
user_recursive_group_ids=None
),
is_subscribed=False,
),
True,
)
# Guest user should have access to public channel only if they
# are subscribed to it.
self.assertEqual(
user_has_content_access(
guest_user,
public_stream,
user_group_membership_details=UserGroupMembershipDetails(
user_recursive_group_ids=None
),
is_subscribed=False,
),
False,
)
self.assertEqual(
user_has_content_access(
guest_user,
public_stream,
user_group_membership_details=UserGroupMembershipDetails(
user_recursive_group_ids=None
),
is_subscribed=True,
),
True,
)
# User should be able to access private channel if they are
# part of `can_add_subscribers_group` but not subscribed to the
# channel.
aaron_group_member_dict = UserGroupMembersData(
direct_members=[aaron.id], direct_subgroups=[]
)
do_change_stream_group_based_setting(
private_stream,
"can_add_subscribers_group",
aaron_group_member_dict,
acting_user=aaron,
)
self.assertEqual(
user_has_content_access(
aaron,
private_stream,
user_group_membership_details=UserGroupMembershipDetails(
user_recursive_group_ids=None
),
is_subscribed=False,
),
True,
)
nobody_group = NamedUserGroup.objects.get(
name="role:nobody", realm_for_sharding=realm, is_system_group=True
)
do_change_stream_group_based_setting(
private_stream,
"can_add_subscribers_group",
nobody_group,
acting_user=aaron,
)
# User should be able to access private channel if they are
# part of `can_subscribe_group` but not subscribed to the
# channel.
do_change_stream_group_based_setting(
private_stream,
"can_subscribe_group",
aaron_group_member_dict,
acting_user=aaron,
)
self.assertEqual(
user_has_content_access(
aaron,
private_stream,
user_group_membership_details=UserGroupMembershipDetails(
user_recursive_group_ids=None
),
is_subscribed=False,
),
True,
)
nobody_group = NamedUserGroup.objects.get(
name="role:nobody", realm_for_sharding=realm, is_system_group=True
)
do_change_stream_group_based_setting(
private_stream,
"can_subscribe_group",
nobody_group,
acting_user=aaron,
)
# User should not be able to access private channel if they are
# part of `can_administer_channel_group` but not subscribed to
# the channel.
do_change_stream_group_based_setting(
private_stream,
"can_administer_channel_group",
aaron_group_member_dict,
acting_user=aaron,
)
self.assertEqual(
user_has_content_access(
aaron,
private_stream,
user_group_membership_details=UserGroupMembershipDetails(
user_recursive_group_ids=None
),
is_subscribed=False,
),
False,
)
self.assertEqual(
user_has_content_access(
aaron,
private_stream,
user_group_membership_details=UserGroupMembershipDetails(
user_recursive_group_ids=None
),
is_subscribed=True,
),
True,
)
    def test_can_access_stream_metadata_user_ids(self) -> None:
        """Exercise can_access_stream_metadata_user_ids and its bulk variant
        together, verifying they agree for web-public, public, and private
        channels as metadata access is gained/lost via subscription, admin
        role, and the group-based channel permission settings; also pins
        database query counts for the cached code paths.
        """
        aaron = self.example_user("aaron")
        cordelia = self.example_user("cordelia")
        guest_user = self.example_user("polonius")
        iago = self.example_user("iago")
        desdemona = self.example_user("desdemona")
        realm = aaron.realm
        public_stream = self.make_stream("public_stream", realm, invite_only=False)
        web_public_stream = self.make_stream("web_public_stream", realm, is_web_public=True)
        nobody_system_group = NamedUserGroup.objects.get(
            name="role:nobody", realm_for_sharding=realm, is_system_group=True
        )
        # Web public stream with no subscribers: every active user in the
        # realm (guests included) has metadata access.
        expected_web_public_user_ids = active_user_ids(realm.id)
        self.assertCountEqual(
            can_access_stream_metadata_user_ids(web_public_stream), expected_web_public_user_ids
        )
        bulk_access_stream_metadata_user_ids = bulk_can_access_stream_metadata_user_ids(
            [web_public_stream]
        )
        self.assertCountEqual(
            bulk_access_stream_metadata_user_ids[web_public_stream.id], expected_web_public_user_ids
        )
        # Public stream with no subscribers: all non-guest active users.
        expected_public_user_ids = set(active_non_guest_user_ids(realm.id))
        self.assertCountEqual(
            can_access_stream_metadata_user_ids(public_stream), expected_public_user_ids
        )
        bulk_access_stream_metadata_user_ids = bulk_can_access_stream_metadata_user_ids(
            [public_stream]
        )
        self.assertCountEqual(
            bulk_access_stream_metadata_user_ids[public_stream.id], expected_public_user_ids
        )
        # Public stream with 1 guest as a subscriber.
        self.subscribe(guest_user, "public_stream")
        expected_public_user_ids.add(guest_user.id)
        self.assertCountEqual(
            can_access_stream_metadata_user_ids(public_stream), expected_public_user_ids
        )
        bulk_access_stream_metadata_user_ids = bulk_can_access_stream_metadata_user_ids(
            [public_stream]
        )
        self.assertCountEqual(
            bulk_access_stream_metadata_user_ids[public_stream.id], expected_public_user_ids
        )
        # A newly created bot counts toward the public stream's user ids
        # without subscribing (it is a non-guest user).
        test_bot = self.create_test_bot("foo", desdemona)
        expected_public_user_ids.add(test_bot.id)
        private_stream = self.make_stream("private_stream", realm, invite_only=True)
        # Nobody is subscribed yet for the private stream, only admin
        # users will turn up for that stream. We will continue testing
        # the existing public stream for the bulk function here on.
        expected_private_user_ids = {iago.id, desdemona.id}
        self.assertCountEqual(
            can_access_stream_metadata_user_ids(private_stream), expected_private_user_ids
        )
        bulk_access_stream_metadata_user_ids = bulk_can_access_stream_metadata_user_ids(
            [public_stream, private_stream]
        )
        self.assertCountEqual(
            bulk_access_stream_metadata_user_ids[public_stream.id], expected_public_user_ids
        )
        self.assertCountEqual(
            bulk_access_stream_metadata_user_ids[private_stream.id], expected_private_user_ids
        )
        # Bot with admin privileges should also be part of the result.
        do_change_user_role(
            test_bot, UserProfile.ROLE_REALM_ADMINISTRATOR, acting_user=desdemona, notify=False
        )
        expected_private_user_ids.add(test_bot.id)
        self.assertCountEqual(
            can_access_stream_metadata_user_ids(private_stream), expected_private_user_ids
        )
        bulk_access_stream_metadata_user_ids = bulk_can_access_stream_metadata_user_ids(
            [public_stream, private_stream]
        )
        self.assertCountEqual(
            bulk_access_stream_metadata_user_ids[public_stream.id], expected_public_user_ids
        )
        self.assertCountEqual(
            bulk_access_stream_metadata_user_ids[private_stream.id], expected_private_user_ids
        )
        # Subscriber should also be part of the result.
        self.subscribe(aaron, "private_stream")
        expected_private_user_ids.add(aaron.id)
        self.assertCountEqual(
            can_access_stream_metadata_user_ids(private_stream), expected_private_user_ids
        )
        bulk_access_stream_metadata_user_ids = bulk_can_access_stream_metadata_user_ids(
            [public_stream, private_stream]
        )
        self.assertCountEqual(
            bulk_access_stream_metadata_user_ids[public_stream.id], expected_public_user_ids
        )
        self.assertCountEqual(
            bulk_access_stream_metadata_user_ids[private_stream.id], expected_private_user_ids
        )
        # Settings outside the metadata-granting list must not change the
        # results, and query counts must stay flat while they vary.
        stream_permission_group_settings = set(Stream.stream_permission_group_settings.keys())
        stream_permission_group_settings_not_granting_metadata_access = (
            stream_permission_group_settings
            - set(Stream.stream_permission_group_settings_granting_metadata_access)
        )
        for setting_name in stream_permission_group_settings_not_granting_metadata_access:
            do_change_stream_group_based_setting(
                private_stream,
                setting_name,
                UserGroupMembersData(direct_members=[cordelia.id], direct_subgroups=[]),
                acting_user=cordelia,
            )
            with self.assert_database_query_count(4):
                private_stream_metadata_user_ids = can_access_stream_metadata_user_ids(
                    private_stream
                )
            self.assertCountEqual(private_stream_metadata_user_ids, expected_private_user_ids)
            with self.assert_database_query_count(6):
                bulk_access_stream_metadata_user_ids = bulk_can_access_stream_metadata_user_ids(
                    [public_stream, private_stream]
                )
            self.assertCountEqual(
                bulk_access_stream_metadata_user_ids[public_stream.id], expected_public_user_ids
            )
            self.assertCountEqual(
                bulk_access_stream_metadata_user_ids[private_stream.id], expected_private_user_ids
            )
        # Each metadata-granting setting should add (and, once reset to
        # role:nobody, remove) cordelia from the private stream's user ids.
        for setting_name in Stream.stream_permission_group_settings_granting_metadata_access:
            do_change_stream_group_based_setting(
                private_stream,
                setting_name,
                UserGroupMembersData(direct_members=[cordelia.id], direct_subgroups=[]),
                acting_user=cordelia,
            )
            expected_private_user_ids.add(cordelia.id)
            with self.assert_database_query_count(4):
                private_stream_metadata_user_ids = can_access_stream_metadata_user_ids(
                    private_stream
                )
            self.assertCountEqual(private_stream_metadata_user_ids, expected_private_user_ids)
            with self.assert_database_query_count(6):
                bulk_access_stream_metadata_user_ids = bulk_can_access_stream_metadata_user_ids(
                    [public_stream, private_stream]
                )
            self.assertCountEqual(
                bulk_access_stream_metadata_user_ids[public_stream.id], expected_public_user_ids
            )
            self.assertCountEqual(
                bulk_access_stream_metadata_user_ids[private_stream.id], expected_private_user_ids
            )
            do_change_stream_group_based_setting(
                private_stream, setting_name, nobody_system_group, acting_user=cordelia
            )
            expected_private_user_ids.remove(cordelia.id)
            bulk_access_stream_metadata_user_ids = bulk_can_access_stream_metadata_user_ids(
                [public_stream, private_stream]
            )
            self.assertCountEqual(
                can_access_stream_metadata_user_ids(private_stream), expected_private_user_ids
            )
            self.assertCountEqual(
                bulk_access_stream_metadata_user_ids[public_stream.id], expected_public_user_ids
            )
            self.assertCountEqual(
                bulk_access_stream_metadata_user_ids[private_stream.id], expected_private_user_ids
            )
        # Query count should not increase on fetching user ids for an
        # additional public stream.
        public_stream_2 = self.make_stream("public_stream_2", realm, invite_only=False)
        with self.assert_database_query_count(6):
            bulk_access_stream_metadata_user_ids = bulk_can_access_stream_metadata_user_ids(
                [public_stream, public_stream_2, private_stream]
            )
        self.assertCountEqual(
            bulk_access_stream_metadata_user_ids[public_stream.id], expected_public_user_ids
        )
        self.assertCountEqual(
            bulk_access_stream_metadata_user_ids[public_stream_2.id],
            active_non_guest_user_ids(realm.id),
        )
        self.assertCountEqual(
            bulk_access_stream_metadata_user_ids[private_stream.id], expected_private_user_ids
        )
        # Query count should not increase on fetching user ids for an
        # additional private stream.
        private_stream_2 = self.make_stream("private_stream_2", realm, invite_only=True)
        self.subscribe(aaron, "private_stream_2")
        with self.assert_database_query_count(6):
            bulk_access_stream_metadata_user_ids = bulk_can_access_stream_metadata_user_ids(
                [public_stream, public_stream_2, private_stream, private_stream_2]
            )
        self.assertCountEqual(
            bulk_access_stream_metadata_user_ids[public_stream.id], expected_public_user_ids
        )
        self.assertCountEqual(
            bulk_access_stream_metadata_user_ids[public_stream_2.id],
            active_non_guest_user_ids(realm.id),
        )
        self.assertCountEqual(
            bulk_access_stream_metadata_user_ids[private_stream.id], expected_private_user_ids
        )
        self.assertCountEqual(
            bulk_access_stream_metadata_user_ids[private_stream_2.id], expected_private_user_ids
        )
| {
"repo_id": "zulip/zulip",
"file_path": "zerver/tests/test_channel_access.py",
"license": "Apache License 2.0",
"lines": 603,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
zulip/zulip:zerver/tests/test_channel_permissions.py | from typing import TypedDict
import orjson
from typing_extensions import override
from zerver.actions.channel_folders import check_add_channel_folder
from zerver.actions.realm_settings import (
do_change_realm_permission_group_setting,
do_set_realm_property,
)
from zerver.actions.streams import (
do_change_stream_group_based_setting,
do_change_stream_permission,
do_deactivate_stream,
)
from zerver.actions.user_groups import add_subgroups_to_user_group, check_add_user_group
from zerver.lib.test_classes import ZulipTestCase
from zerver.lib.test_helpers import get_subscription
from zerver.lib.types import UserGroupMembersData
from zerver.lib.user_groups import get_group_setting_value_for_api
from zerver.models import NamedUserGroup, Recipient, Stream, Subscription, UserProfile
from zerver.models.groups import SystemGroups
from zerver.models.realms import get_realm
from zerver.models.streams import StreamTopicsPolicyEnum, get_stream
class ChannelSubscriptionPermissionTest(ZulipTestCase):
    @override
    def setUp(self) -> None:
        # Most tests in this class act as Hamlet; cache the profile once.
        super().setUp()
        self.test_user = self.example_user("hamlet")
    def test_realm_settings_for_subscribing_other_users(self) -> None:
        """
        You can't subscribe other people to streams if you are a guest or your account is not old
        enough.

        Walks the realm-level `can_add_subscribers_group` setting through
        the system groups (administrators, nobody, moderators, members,
        full members) and then a named group and an anonymous group,
        checking which roles can subscribe others at each step.
        """
        user_profile = self.example_user("cordelia")
        invitee_user_id = user_profile.id
        realm = user_profile.realm
        admins_group = NamedUserGroup.objects.get(
            name=SystemGroups.ADMINISTRATORS, realm_for_sharding=realm, is_system_group=True
        )
        do_change_realm_permission_group_setting(
            realm, "can_add_subscribers_group", admins_group, acting_user=None
        )
        # User should be allowed to add subscribers when creating the
        # channel even if they don't have realm wide permission to
        # add other subscribers to a channel.
        self.set_user_role(self.test_user, UserProfile.ROLE_MODERATOR)
        result = self.subscribe_via_post(
            self.test_user,
            ["stream1"],
            # Creator will be part of `can_administer_channel_group` by
            # default for a new channel. We set it to admin, so that we
            # can test for errors in the next piece of this test.
            {
                "principals": orjson.dumps([invitee_user_id]).decode(),
                "can_administer_channel_group": admins_group.id,
            },
            allow_fail=True,
        )
        self.assert_json_success(result)
        # Once the channel exists, the moderator lacks the (admins-only)
        # realm permission and cannot add further subscribers.
        result = self.subscribe_via_post(
            self.test_user,
            ["stream1"],
            {"principals": orjson.dumps([self.example_user("aaron").id]).decode()},
            allow_fail=True,
        )
        self.assert_json_error(result, "Insufficient permission")
        nobody_group = NamedUserGroup.objects.get(
            name=SystemGroups.NOBODY, realm_for_sharding=realm, is_system_group=True
        )
        do_change_realm_permission_group_setting(
            realm, "can_add_subscribers_group", nobody_group, acting_user=None
        )
        do_change_stream_group_based_setting(
            get_stream("stream1", realm),
            "can_add_subscribers_group",
            nobody_group,
            acting_user=user_profile,
        )
        # Admins have a special permission to administer every channel
        # they have access to. This also grants them access to add
        # subscribers.
        self.set_user_role(self.test_user, UserProfile.ROLE_REALM_ADMINISTRATOR)
        self.subscribe_via_post(
            self.test_user, ["stream1"], {"principals": orjson.dumps([invitee_user_id]).decode()}
        )
        moderators_group = NamedUserGroup.objects.get(
            name=SystemGroups.MODERATORS, realm_for_sharding=realm, is_system_group=True
        )
        do_change_realm_permission_group_setting(
            realm, "can_add_subscribers_group", moderators_group, acting_user=None
        )
        self.set_user_role(self.test_user, UserProfile.ROLE_MEMBER)
        # Make sure that we are checking the permission with a full member,
        # as full member is the user just below moderator in the role hierarchy.
        self.assertFalse(self.test_user.is_provisional_member)
        # User will be able to add subscribers to a newly created
        # stream without any realm wide permissions. We create this
        # stream programmatically so that we can test for errors for an
        # existing stream.
        self.make_stream("stream2")
        result = self.subscribe_via_post(
            self.test_user,
            ["stream2"],
            {"principals": orjson.dumps([invitee_user_id]).decode()},
            allow_fail=True,
        )
        self.assert_json_error(result, "Insufficient permission")
        self.set_user_role(self.test_user, UserProfile.ROLE_MODERATOR)
        self.subscribe_via_post(
            self.test_user, ["stream2"], {"principals": orjson.dumps([invitee_user_id]).decode()}
        )
        self.unsubscribe(user_profile, "stream2")
        members_group = NamedUserGroup.objects.get(
            name=SystemGroups.MEMBERS, realm_for_sharding=realm, is_system_group=True
        )
        do_change_realm_permission_group_setting(
            realm, "can_add_subscribers_group", members_group, acting_user=None
        )
        # Guests can never subscribe other users, regardless of the
        # configured group.
        self.set_user_role(self.test_user, UserProfile.ROLE_GUEST)
        result = self.subscribe_via_post(
            self.test_user,
            ["stream2"],
            {"principals": orjson.dumps([invitee_user_id]).decode()},
            allow_fail=True,
        )
        self.assert_json_error(result, "Not allowed for guest users")
        self.set_user_role(self.test_user, UserProfile.ROLE_MEMBER)
        self.subscribe_via_post(
            self.test_user,
            ["stream2"],
            {"principals": orjson.dumps([self.test_user.id, invitee_user_id]).decode()},
        )
        self.unsubscribe(user_profile, "stream2")
        full_members_group = NamedUserGroup.objects.get(
            name=SystemGroups.FULL_MEMBERS, realm_for_sharding=realm, is_system_group=True
        )
        do_change_realm_permission_group_setting(
            realm, "can_add_subscribers_group", full_members_group, acting_user=None
        )
        # A huge waiting period makes plain members provisional, so they
        # fall outside the full-members group.
        do_set_realm_property(realm, "waiting_period_threshold", 100000, acting_user=None)
        self.assertTrue(user_profile.is_provisional_member)
        result = self.subscribe_via_post(
            self.test_user,
            ["stream2"],
            {"principals": orjson.dumps([invitee_user_id]).decode()},
            allow_fail=True,
        )
        self.assert_json_error(result, "Insufficient permission")
        # Moderators, Admins and owners are always full members.
        self.assertTrue(user_profile.is_provisional_member)
        self.set_user_role(self.test_user, UserProfile.ROLE_MODERATOR)
        self.assertFalse(self.test_user.is_provisional_member)
        self.set_user_role(self.test_user, UserProfile.ROLE_REALM_ADMINISTRATOR)
        self.assertFalse(self.test_user.is_provisional_member)
        self.set_user_role(self.test_user, UserProfile.ROLE_REALM_OWNER)
        self.assertFalse(self.test_user.is_provisional_member)
        do_set_realm_property(realm, "waiting_period_threshold", 0, acting_user=None)
        self.subscribe_via_post(
            self.test_user, ["stream2"], {"principals": orjson.dumps([invitee_user_id]).decode()}
        )
        self.unsubscribe(user_profile, "stream2")
        # The setting also works with a named (non-system) user group...
        named_user_group = check_add_user_group(
            realm, "named_user_group", [self.test_user], acting_user=self.test_user
        )
        do_change_realm_permission_group_setting(
            realm,
            "can_add_subscribers_group",
            named_user_group,
            acting_user=None,
        )
        self.subscribe_via_post(
            self.test_user,
            ["stream2"],
            {"principals": orjson.dumps([invitee_user_id]).decode()},
        )
        self.unsubscribe(user_profile, "stream2")
        # ...and with an anonymous group.
        anonymous_group = self.create_or_update_anonymous_group_for_setting([self.test_user], [])
        do_change_realm_permission_group_setting(
            realm,
            "can_add_subscribers_group",
            anonymous_group,
            acting_user=None,
        )
        self.subscribe_via_post(
            self.test_user,
            ["stream2"],
            {"principals": orjson.dumps([invitee_user_id]).decode()},
        )
        self.unsubscribe(user_profile, "stream2")
    def test_stream_settings_for_subscribing_other_users(self) -> None:
        """Like the realm-level test above, but for the channel-level
        `can_add_subscribers_group` setting (with the realm setting locked
        down to nobody), plus the interaction with
        `can_administer_channel_group` on public and private channels.
        """
        user_profile = self.example_user("cordelia")
        invitee_user_id = user_profile.id
        realm = user_profile.realm
        nobody_group = NamedUserGroup.objects.get(
            name=SystemGroups.NOBODY, realm_for_sharding=realm, is_system_group=True
        )
        do_change_realm_permission_group_setting(
            realm, "can_add_subscribers_group", nobody_group, acting_user=None
        )
        # User will be able to add subscribers to a newly created
        # stream without any realm wide permissions. We create this
        # stream programmatically so that we can test for errors for an
        # existing stream.
        do_change_stream_group_based_setting(
            self.make_stream("stream1"),
            "can_add_subscribers_group",
            nobody_group,
            acting_user=user_profile,
        )
        result = self.subscribe_via_post(
            self.test_user,
            ["stream1"],
            {"principals": orjson.dumps([invitee_user_id]).decode()},
            allow_fail=True,
        )
        self.assert_json_error(result, "Insufficient permission")
        # Admins have a special permission to administer every channel
        # they have access to. This also grants them access to add
        # subscribers.
        self.set_user_role(self.test_user, UserProfile.ROLE_REALM_ADMINISTRATOR)
        result = self.subscribe_via_post(
            self.test_user, ["stream1"], {"principals": orjson.dumps([invitee_user_id]).decode()}
        )
        self.assert_json_success(result)
        self.set_user_role(self.test_user, UserProfile.ROLE_MEMBER)
        # Make sure that we are checking the permission with a full member,
        # as full member is the user just below moderator in the role hierarchy.
        self.assertFalse(self.test_user.is_provisional_member)
        # User will be able to add subscribers to a newly created
        # stream without any realm wide permissions. We create this
        # stream programmatically so that we can test for errors for an
        # existing stream.
        stream2 = self.make_stream("stream2")
        moderators_group = NamedUserGroup.objects.get(
            name=SystemGroups.MODERATORS, realm_for_sharding=realm, is_system_group=True
        )
        do_change_stream_group_based_setting(
            stream2, "can_add_subscribers_group", moderators_group, acting_user=user_profile
        )
        result = self.subscribe_via_post(
            self.test_user,
            ["stream2"],
            {"principals": orjson.dumps([invitee_user_id]).decode()},
            allow_fail=True,
        )
        self.assert_json_error(result, "Insufficient permission")
        self.set_user_role(self.test_user, UserProfile.ROLE_MODERATOR)
        self.subscribe_via_post(
            self.test_user, ["stream2"], {"principals": orjson.dumps([invitee_user_id]).decode()}
        )
        self.unsubscribe(user_profile, "stream2")
        members_group = NamedUserGroup.objects.get(
            name=SystemGroups.MEMBERS, realm_for_sharding=realm, is_system_group=True
        )
        do_change_stream_group_based_setting(
            stream2, "can_add_subscribers_group", members_group, acting_user=user_profile
        )
        # Guests can never subscribe others, even when the channel-level
        # group would otherwise include them.
        self.set_user_role(self.test_user, UserProfile.ROLE_GUEST)
        result = self.subscribe_via_post(
            self.test_user,
            ["stream2"],
            {"principals": orjson.dumps([invitee_user_id]).decode()},
            allow_fail=True,
        )
        self.assert_json_error(result, "Not allowed for guest users")
        self.set_user_role(self.test_user, UserProfile.ROLE_MEMBER)
        self.subscribe_via_post(
            self.test_user,
            ["stream2"],
            {"principals": orjson.dumps([self.test_user.id, invitee_user_id]).decode()},
        )
        self.unsubscribe(user_profile, "stream2")
        # User should be able to subscribe other users if they have
        # permissions to administer the channel.
        do_change_stream_group_based_setting(
            stream2, "can_add_subscribers_group", nobody_group, acting_user=user_profile
        )
        do_change_stream_group_based_setting(
            stream2, "can_administer_channel_group", members_group, acting_user=user_profile
        )
        self.subscribe_via_post(
            self.test_user,
            ["stream2"],
            {"principals": orjson.dumps([self.test_user.id, invitee_user_id]).decode()},
        )
        self.unsubscribe(user_profile, "stream2")
        do_change_stream_group_based_setting(
            stream2, "can_administer_channel_group", nobody_group, acting_user=user_profile
        )
        full_members_group = NamedUserGroup.objects.get(
            name=SystemGroups.FULL_MEMBERS, realm_for_sharding=realm, is_system_group=True
        )
        do_change_stream_group_based_setting(
            stream2, "can_add_subscribers_group", full_members_group, acting_user=user_profile
        )
        # A huge waiting period keeps plain members out of the
        # full-members group.
        do_set_realm_property(realm, "waiting_period_threshold", 100000, acting_user=None)
        result = self.subscribe_via_post(
            self.test_user,
            ["stream2"],
            {"principals": orjson.dumps([invitee_user_id]).decode()},
            allow_fail=True,
        )
        self.assert_json_error(result, "Insufficient permission")
        do_set_realm_property(realm, "waiting_period_threshold", 0, acting_user=None)
        self.subscribe_via_post(
            self.test_user, ["stream2"], {"principals": orjson.dumps([invitee_user_id]).decode()}
        )
        self.unsubscribe(user_profile, "stream2")
        # The channel-level setting also works with a named group...
        named_user_group = check_add_user_group(
            realm, "named_user_group", [self.test_user], acting_user=self.test_user
        )
        do_change_stream_group_based_setting(
            stream2,
            "can_add_subscribers_group",
            named_user_group,
            acting_user=user_profile,
        )
        self.subscribe_via_post(
            self.test_user,
            ["stream2"],
            {"principals": orjson.dumps([invitee_user_id]).decode()},
        )
        self.unsubscribe(user_profile, "stream2")
        # ...and with an anonymous group.
        anonymous_group_member_dict = UserGroupMembersData(
            direct_members=[self.test_user.id], direct_subgroups=[]
        )
        do_change_stream_group_based_setting(
            stream2,
            "can_add_subscribers_group",
            anonymous_group_member_dict,
            acting_user=user_profile,
        )
        self.subscribe_via_post(
            self.test_user,
            ["stream2"],
            {"principals": orjson.dumps([invitee_user_id]).decode()},
        )
        self.unsubscribe(user_profile, "stream2")
        # can_add_subscribers_group grants the needed content access on a
        # private channel even without a subscription...
        private_stream = self.make_stream("private_stream", invite_only=True)
        do_change_stream_group_based_setting(
            private_stream, "can_add_subscribers_group", members_group, acting_user=user_profile
        )
        result = self.subscribe_via_post(
            self.test_user,
            ["private_stream"],
            {"principals": orjson.dumps([invitee_user_id]).decode()},
        )
        self.assert_json_success(result)
        do_change_stream_group_based_setting(
            private_stream, "can_add_subscribers_group", nobody_group, acting_user=user_profile
        )
        self.unsubscribe(user_profile, "private_stream")
        # ...but can_administer_channel_group alone does not grant access
        # to an unsubscribed private channel.
        do_change_stream_group_based_setting(
            private_stream,
            "can_administer_channel_group",
            members_group,
            acting_user=user_profile,
        )
        result = self.subscribe_via_post(
            self.test_user,
            ["private_stream"],
            {"principals": orjson.dumps([invitee_user_id]).decode()},
            allow_fail=True,
        )
        self.assert_json_error(result, "Unable to access channel (private_stream).")
    def test_stream_settings_for_subscribing(self) -> None:
        """Test which users may subscribe *themselves*, as controlled by the
        channel-level `can_subscribe_group` and `can_add_subscribers_group`
        settings on public and private channels.
        """
        realm = get_realm("zulip")
        stream = self.make_stream("public_stream")
        nobody_group = NamedUserGroup.objects.get(
            name=SystemGroups.NOBODY, realm_for_sharding=realm, is_system_group=True
        )

        # NOTE: this helper closes over `stream`, so reassigning `stream`
        # below switches the channel under test.
        def check_user_can_subscribe(user: UserProfile, error_msg: str | None = None) -> None:
            # Attempt self-subscription; expect success unless error_msg
            # is given, in which case expect that exact error.
            result = self.subscribe_via_post(
                user,
                [stream.name],
                allow_fail=error_msg is not None,
            )
            if error_msg:
                self.assert_json_error(result, error_msg)
                return
            self.assertTrue(
                Subscription.objects.filter(
                    recipient__type=Recipient.STREAM,
                    recipient__type_id=stream.id,
                    user_profile=user,
                ).exists()
            )
            # Unsubscribe user again for testing next case.
            self.unsubscribe(user, stream.name)

        desdemona = self.example_user("desdemona")
        shiva = self.example_user("shiva")
        hamlet = self.example_user("hamlet")
        polonius = self.example_user("polonius")
        othello = self.example_user("othello")
        do_change_realm_permission_group_setting(
            realm, "can_add_subscribers_group", nobody_group, acting_user=othello
        )
        do_change_stream_group_based_setting(
            stream, "can_add_subscribers_group", nobody_group, acting_user=othello
        )
        do_change_stream_group_based_setting(
            stream, "can_subscribe_group", nobody_group, acting_user=othello
        )
        # Any non-guest can self-subscribe to a public channel; guests
        # cannot, regardless of the group settings below.
        check_user_can_subscribe(desdemona)
        check_user_can_subscribe(shiva)
        check_user_can_subscribe(hamlet)
        check_user_can_subscribe(othello)
        check_user_can_subscribe(polonius, "Not allowed for guest users")
        setting_group_member_dict = UserGroupMembersData(
            direct_members=[polonius.id], direct_subgroups=[]
        )
        do_change_stream_group_based_setting(
            stream, "can_subscribe_group", setting_group_member_dict, acting_user=othello
        )
        check_user_can_subscribe(polonius, "Not allowed for guest users")
        do_change_stream_group_based_setting(
            stream, "can_subscribe_group", nobody_group, acting_user=othello
        )
        do_change_stream_group_based_setting(
            stream, "can_add_subscribers_group", setting_group_member_dict, acting_user=othello
        )
        check_user_can_subscribe(polonius, "Not allowed for guest users")
        do_change_stream_group_based_setting(
            stream, "can_add_subscribers_group", nobody_group, acting_user=othello
        )
        do_change_stream_group_based_setting(
            stream, "can_administer_channel_group", setting_group_member_dict, acting_user=othello
        )
        check_user_can_subscribe(polonius, "Not allowed for guest users")
        # Switch to a private channel: self-subscription now requires
        # membership in can_subscribe_group (or see below).
        stream = self.subscribe(self.example_user("iago"), "private_stream", invite_only=True)
        check_user_can_subscribe(desdemona, f"Unable to access channel ({stream.name}).")
        check_user_can_subscribe(shiva, f"Unable to access channel ({stream.name}).")
        check_user_can_subscribe(hamlet, f"Unable to access channel ({stream.name}).")
        check_user_can_subscribe(othello, f"Unable to access channel ({stream.name}).")
        owners_group = NamedUserGroup.objects.get(
            name=SystemGroups.OWNERS, realm_for_sharding=realm, is_system_group=True
        )
        do_change_stream_group_based_setting(
            stream, "can_subscribe_group", owners_group, acting_user=othello
        )
        check_user_can_subscribe(shiva, f"Unable to access channel ({stream.name}).")
        check_user_can_subscribe(hamlet, f"Unable to access channel ({stream.name}).")
        check_user_can_subscribe(othello, f"Unable to access channel ({stream.name}).")
        check_user_can_subscribe(desdemona)
        hamletcharacters_group = NamedUserGroup.objects.get(
            name="hamletcharacters", realm_for_sharding=realm
        )
        do_change_stream_group_based_setting(
            stream, "can_subscribe_group", hamletcharacters_group, acting_user=othello
        )
        check_user_can_subscribe(shiva, f"Unable to access channel ({stream.name}).")
        check_user_can_subscribe(desdemona, f"Unable to access channel ({stream.name}).")
        check_user_can_subscribe(othello, f"Unable to access channel ({stream.name}).")
        check_user_can_subscribe(hamlet)
        setting_group_member_dict = UserGroupMembersData(
            direct_members=[othello.id], direct_subgroups=[owners_group.id]
        )
        do_change_stream_group_based_setting(
            stream, "can_subscribe_group", setting_group_member_dict, acting_user=othello
        )
        check_user_can_subscribe(shiva, f"Unable to access channel ({stream.name}).")
        check_user_can_subscribe(hamlet, f"Unable to access channel ({stream.name}).")
        check_user_can_subscribe(othello)
        check_user_can_subscribe(desdemona)
        # Users can also subscribe if they are allowed to subscribe other users.
        do_change_stream_group_based_setting(
            stream, "can_subscribe_group", nobody_group, acting_user=othello
        )
        do_change_stream_group_based_setting(
            stream, "can_add_subscribers_group", setting_group_member_dict, acting_user=othello
        )
        check_user_can_subscribe(shiva, f"Unable to access channel ({stream.name}).")
        check_user_can_subscribe(hamlet, f"Unable to access channel ({stream.name}).")
        check_user_can_subscribe(othello)
        check_user_can_subscribe(desdemona)
        # Users cannot subscribe if they belong to can_administer_channel_group but
        # do not belong to any of can_subscribe_group and can_add_subscribers_group.
        do_change_stream_group_based_setting(
            stream, "can_add_subscribers_group", nobody_group, acting_user=othello
        )
        do_change_stream_group_based_setting(
            stream, "can_administer_channel_group", setting_group_member_dict, acting_user=othello
        )
        check_user_can_subscribe(shiva, f"Unable to access channel ({stream.name}).")
        check_user_can_subscribe(hamlet, f"Unable to access channel ({stream.name}).")
        check_user_can_subscribe(othello, f"Unable to access channel ({stream.name}).")
        check_user_can_subscribe(desdemona, f"Unable to access channel ({stream.name}).")
    def test_can_remove_subscribers_group(self) -> None:
        """
        Verify which users may unsubscribe *other* users from a channel:
        members of the channel's can_remove_subscribers_group, users who can
        administer the channel, and realm admins/owners (who can administer
        every channel).  For private channels, the acting user additionally
        needs metadata access to the channel.
        """
        realm = get_realm("zulip")
        iago = self.example_user("iago")
        leadership_group = check_add_user_group(
            realm,
            "leadership",
            [iago, self.example_user("shiva")],
            acting_user=iago,
        )
        hamlet = self.example_user("hamlet")
        managers_group = check_add_user_group(realm, "managers", [hamlet], acting_user=hamlet)
        # leadership is a subgroup of managers, so members of leadership
        # also count as members of managers in permission checks.
        add_subgroups_to_user_group(managers_group, [leadership_group], acting_user=None)
        cordelia = self.example_user("cordelia")
        othello = self.example_user("othello")
        shiva = self.example_user("shiva")
        public_stream = self.make_stream("public_stream")

        def check_unsubscribing_user(
            user: UserProfile,
            can_remove_subscribers_group: NamedUserGroup | UserGroupMembersData,
            expect_fail: bool = False,
            stream_list: list[Stream] | None = None,
            skip_changing_group_setting: bool = False,
        ) -> None:
            # Subscribe Cordelia to every stream in stream_list, optionally
            # set can_remove_subscribers_group on each, then have `user`
            # attempt to remove Cordelia from all of them in one request.
            self.login_user(user)
            if stream_list is None:
                stream_list = [public_stream]
            for stream in stream_list:
                self.subscribe(cordelia, stream.name)
                if not skip_changing_group_setting:
                    do_change_stream_group_based_setting(
                        stream,
                        "can_remove_subscribers_group",
                        can_remove_subscribers_group,
                        acting_user=user,
                    )
            stream_name_list = [stream.name for stream in stream_list]
            result = self.client_delete(
                "/json/users/me/subscriptions",
                {
                    "subscriptions": orjson.dumps(stream_name_list).decode(),
                    "principals": orjson.dumps([cordelia.id]).decode(),
                },
            )
            if expect_fail:
                self.assert_json_error(result, "Insufficient permission")
                return

            json = self.assert_json_success(result)
            self.assert_length(json["removed"], len(stream_name_list))
            self.assert_length(json["not_removed"], 0)

        # Hamlet is not in leadership and cannot administer the channel.
        check_unsubscribing_user(
            self.example_user("hamlet"),
            leadership_group,
            expect_fail=True,
            stream_list=[public_stream],
        )
        check_unsubscribing_user(iago, leadership_group, stream_list=[public_stream])
        # Owners can unsubscribe others when they are not a member of
        # the allowed group since owners have the permission to
        # administer all channels.
        check_unsubscribing_user(
            self.example_user("desdemona"), leadership_group, stream_list=[public_stream]
        )

        check_unsubscribing_user(
            othello,
            managers_group,
            expect_fail=True,
            stream_list=[public_stream],
        )
        # Shiva is in leadership, a subgroup of managers; Hamlet is a
        # direct member of managers.
        check_unsubscribing_user(shiva, managers_group, stream_list=[public_stream])
        check_unsubscribing_user(hamlet, managers_group, stream_list=[public_stream])

        private_stream = self.make_stream("private_stream", invite_only=True)
        self.subscribe(self.example_user("hamlet"), private_stream.name)
        # Users are not allowed to unsubscribe others from streams they
        # don't have metadata access to even if they are a member of the
        # allowed group. In this case, a non-admin who is not subscribed
        # to the channel does not have metadata access to the channel.
        check_unsubscribing_user(
            shiva,
            leadership_group,
            expect_fail=True,
            stream_list=[private_stream],
        )
        check_unsubscribing_user(iago, leadership_group, stream_list=[private_stream])
        # Users are allowed to unsubscribe others from private streams
        # they have access to if they are a member of the allowed
        # group. In this case, a user with the role `owner` is
        # subscribed to the relevant channel.
        check_unsubscribing_user(
            self.example_user("desdemona"), leadership_group, stream_list=[private_stream]
        )
        self.subscribe(shiva, private_stream.name)
        check_unsubscribing_user(shiva, leadership_group, stream_list=[private_stream])

        # Test changing setting to anonymous group.
        setting_group_member_dict = UserGroupMembersData(
            direct_members=[hamlet.id],
            direct_subgroups=[leadership_group.id],
        )
        check_unsubscribing_user(
            othello,
            setting_group_member_dict,
            expect_fail=True,
            stream_list=[private_stream],
        )
        check_unsubscribing_user(hamlet, setting_group_member_dict, stream_list=[private_stream])
        check_unsubscribing_user(iago, setting_group_member_dict, stream_list=[private_stream])
        check_unsubscribing_user(shiva, setting_group_member_dict, stream_list=[private_stream])

        # Owners can unsubscribe others when they are not a member of
        # the allowed group since admins have the permission to
        # administer all channels.
        setting_group_member_dict = UserGroupMembersData(
            direct_members=[hamlet.id],
            direct_subgroups=[],
        )
        check_unsubscribing_user(
            self.example_user("desdemona"), setting_group_member_dict, stream_list=[private_stream]
        )
        check_unsubscribing_user(iago, setting_group_member_dict, stream_list=[private_stream])

        # A user who is part of can_administer_channel_group should be
        # able to unsubscribe other users even if that user is not part
        # of can_remove_subscribers_group. And even if that user is not
        # subscribed to the channel in question.
        with self.assertRaises(Subscription.DoesNotExist):
            get_subscription(private_stream.name, othello)
        check_unsubscribing_user(othello, setting_group_member_dict, expect_fail=True)
        othello_group_member_dict = UserGroupMembersData(
            direct_members=[othello.id], direct_subgroups=[]
        )
        private_stream_2 = self.make_stream("private_stream_2")
        do_change_stream_group_based_setting(
            private_stream,
            "can_administer_channel_group",
            othello_group_member_dict,
            acting_user=othello,
        )
        # If the user can only administer one of the channels, the test
        # should fail.
        check_unsubscribing_user(
            othello,
            setting_group_member_dict,
            expect_fail=True,
            stream_list=[private_stream, private_stream_2],
        )
        # User can administer both channels now.
        do_change_stream_group_based_setting(
            private_stream_2,
            "can_administer_channel_group",
            othello_group_member_dict,
            acting_user=othello,
        )
        check_unsubscribing_user(
            othello, setting_group_member_dict, stream_list=[private_stream, private_stream_2]
        )

        shiva_group_member_dict = UserGroupMembersData(
            direct_members=[shiva.id], direct_subgroups=[]
        )
        do_change_stream_group_based_setting(
            private_stream,
            "can_remove_subscribers_group",
            shiva_group_member_dict,
            acting_user=shiva,
        )
        self.subscribe(shiva, private_stream.name)
        self.subscribe(shiva, private_stream_2.name)
        # If the user is present in the remove subscribers group of
        # only one of the channels, the test should fail.
        check_unsubscribing_user(
            shiva,
            setting_group_member_dict,
            expect_fail=True,
            stream_list=[private_stream, private_stream_2],
            skip_changing_group_setting=True,
        )
        do_change_stream_group_based_setting(
            private_stream_2,
            "can_remove_subscribers_group",
            shiva_group_member_dict,
            acting_user=shiva,
        )
        check_unsubscribing_user(
            shiva,
            setting_group_member_dict,
            stream_list=[private_stream, private_stream_2],
            skip_changing_group_setting=True,
        )
def test_change_stream_message_retention_days_requires_realm_owner(self) -> None:
user_profile = self.example_user("iago")
self.login_user(user_profile)
realm = user_profile.realm
stream = self.subscribe(user_profile, "stream_name1")
result = self.client_patch(
f"/json/streams/{stream.id}", {"message_retention_days": orjson.dumps(2).decode()}
)
self.assert_json_error(result, "Must be an organization owner")
self.set_user_role(user_profile, UserProfile.ROLE_REALM_OWNER)
result = self.client_patch(
f"/json/streams/{stream.id}", {"message_retention_days": orjson.dumps(2).decode()}
)
self.assert_json_success(result)
stream = get_stream("stream_name1", realm)
self.assertEqual(stream.message_retention_days, 2)
class PermissionCheckConfigDict(TypedDict):
    """One permission-check scenario for a channel group-based setting."""

    # The value to assign to the group-based setting under test; either a
    # named group or an anonymous group (UserGroupMembersData).
    setting_group: NamedUserGroup | UserGroupMembersData
    # Users expected to succeed at the operation under this setting.
    users_with_permission: list[UserProfile]
    # Users expected to be rejected under this setting.
    users_without_permission: list[UserProfile]
class ChannelAdministerPermissionTest(ZulipTestCase):
    """Tests covering the `can_administer_channel_group` channel permission.

    `setUp` caches commonly used users and groups on the test instance so
    the individual tests can refer to them directly.
    """

    @override
    def setUp(self) -> None:
        super().setUp()
        self.realm = get_realm("zulip")
        # Iago is a realm administrator, Shiva a moderator, and Polonius a
        # guest in the standard test database.
        self.admin = self.example_user("iago")
        self.moderator = self.example_user("shiva")
        self.guest = self.example_user("polonius")
        # A user-defined (non-system) group from the test database.
        self.hamletcharacters_group = NamedUserGroup.objects.get(
            name="hamletcharacters", realm_for_sharding=self.realm
        )
        self.moderators_group = NamedUserGroup.objects.get(
            name=SystemGroups.MODERATORS, realm_for_sharding=self.realm, is_system_group=True
        )
        self.nobody_group = NamedUserGroup.objects.get(
            name=SystemGroups.NOBODY, realm_for_sharding=self.realm, is_system_group=True
        )
        self.members_group = NamedUserGroup.objects.get(
            name=SystemGroups.MEMBERS, realm_for_sharding=self.realm, is_system_group=True
        )
    def do_test_updating_channel(
        self, stream: Stream, property_name: str, new_value: str | int | bool
    ) -> None:
        """
        Shared driver: try to set `property_name` of `stream` to `new_value`
        as various users, under several can_administer_channel_group
        configurations, asserting success or failure for each.  The stream
        property is restored after every successful update.
        """
        hamlet = self.example_user("hamlet")
        prospero = self.example_user("prospero")
        # For some properties, name of the field in Stream model
        # is different from parameter name used in API request.
        api_parameter_name_dict = dict(
            name="new_name",
            deactivated="is_archived",
        )
        api_parameter_name = property_name
        if property_name in api_parameter_name_dict:
            api_parameter_name = api_parameter_name_dict[property_name]
        data = {}
        # topics_policy is sent as the enum *name*; other non-string values
        # are JSON-encoded for the API.
        if property_name == "topics_policy":
            data[api_parameter_name] = StreamTopicsPolicyEnum(new_value).name
        elif not isinstance(new_value, str):
            data[api_parameter_name] = orjson.dumps(new_value).decode()
        else:
            data[api_parameter_name] = new_value
        default_error_msg = "You do not have permission to administer this channel."

        def check_channel_property_update(user: UserProfile, error_msg: str | None = None) -> None:
            # Attempt the update as `user`; expect `error_msg` on failure,
            # otherwise verify the new value and restore the original.
            old_value = getattr(stream, property_name)
            if property_name == "deactivated" and new_value is True:
                # There is a separate endpoint for deactivating streams.
                result = self.api_delete(user, f"/api/v1/streams/{stream.id}")
            else:
                result = self.api_patch(user, f"/api/v1/streams/{stream.id}", info=data)
            if error_msg is not None:
                self.assert_json_error(result, error_msg)
                return

            self.assert_json_success(result)
            stream.refresh_from_db()
            self.assertEqual(getattr(stream, property_name), new_value)
            # Reset to original value.
            setattr(stream, property_name, old_value)
            stream.save(update_fields=[property_name])

        anonymous_group_dict = UserGroupMembersData(
            direct_members=[prospero.id, self.guest.id], direct_subgroups=[]
        )
        group_permission_checks: list[PermissionCheckConfigDict] = [
            # Check admin can always administer channel.
            PermissionCheckConfigDict(
                setting_group=self.nobody_group,
                users_without_permission=[self.moderator],
                users_with_permission=[self.admin],
            ),
            # Check case when can_administer_channel_group is set to a system group.
            PermissionCheckConfigDict(
                setting_group=self.moderators_group,
                users_without_permission=[hamlet],
                users_with_permission=[self.moderator],
            ),
            # Check case when can_administer_channel_group is set to a user-defined group.
            PermissionCheckConfigDict(
                setting_group=self.hamletcharacters_group,
                users_without_permission=[self.moderator],
                users_with_permission=[hamlet],
            ),
            # Check case when can_administer_channel_group is set to an anonymous group.
            PermissionCheckConfigDict(
                setting_group=anonymous_group_dict,
                users_without_permission=[self.moderator, hamlet],
                users_with_permission=[prospero],
            ),
        ]
        for check_config in group_permission_checks:
            do_change_stream_group_based_setting(
                stream,
                "can_administer_channel_group",
                check_config["setting_group"],
                acting_user=self.admin,
            )
            for user in check_config["users_without_permission"]:
                error_msg = default_error_msg
                check_channel_property_update(user, error_msg=error_msg)
            for user in check_config["users_with_permission"]:
                check_channel_property_update(user)

        # Check guests cannot update property even when they belong
        # to "can_administer_channel_group".
        check_channel_property_update(self.guest, error_msg="Invalid channel ID")
        self.subscribe(self.guest, stream.name)
        check_channel_property_update(self.guest, error_msg=default_error_msg)
        self.unsubscribe(self.guest, stream.name)

        # Check for permission in unsubscribed private streams.
        if stream.invite_only:
            self.unsubscribe(self.admin, stream.name)
            self.unsubscribe(prospero, stream.name)
            self.unsubscribe(hamlet, stream.name)
            # Hamlet does not have metadata access.
            check_channel_property_update(hamlet, error_msg="Invalid channel ID")
            # Admins always have metadata access and administering permission.
            check_channel_property_update(self.admin)
            # Prospero has metadata access by being in can_administer_channel_group.
            check_channel_property_update(prospero)
            # Re-subscribe users for next tests.
            self.subscribe(self.admin, stream.name)
            self.subscribe(prospero, stream.name)
            self.subscribe(hamlet, stream.name)
def test_administering_permission_for_updating_channel(self) -> None:
"""
This test is only for checking permission to update basic channel
properties like name, description, folder and permission to unarchive
the channel. Other things like permission to update group settings and
channel privacy are tested separately.
"""
public_stream = self.make_stream("test stream")
private_stream = self.make_stream("private_stream", invite_only=True)
self.subscribe(self.admin, private_stream.name)
self.subscribe(self.moderator, private_stream.name)
self.subscribe(self.example_user("hamlet"), private_stream.name)
self.subscribe(self.example_user("prospero"), private_stream.name)
channel_folder = check_add_channel_folder(
self.realm, "Frontend", "", acting_user=self.admin
)
for stream in [public_stream, private_stream]:
self.do_test_updating_channel(stream, "name", "Renamed stream")
self.do_test_updating_channel(stream, "description", "Edited stream description")
self.do_test_updating_channel(stream, "folder_id", channel_folder.id)
self.do_test_updating_channel(
stream, "topics_policy", StreamTopicsPolicyEnum.allow_empty_topic.value
)
self.do_test_updating_channel(stream, "deactivated", True)
do_deactivate_stream(stream, acting_user=None)
self.do_test_updating_channel(stream, "deactivated", False)
    def check_channel_privacy_update(
        self, user: UserProfile, property_name: str, new_value: bool, error_msg: str | None = None
    ) -> None:
        """
        Attempt to change a privacy property of "test_stream" as `user`.

        If `error_msg` is given, assert the request fails with it; otherwise
        assert success, verify the new value, and restore the stream's
        original privacy settings.
        """
        stream = get_stream("test_stream", user.realm)
        data = {}
        # Snapshot all three privacy fields so we can restore them together.
        old_values = {
            "invite_only": stream.invite_only,
            "is_web_public": stream.is_web_public,
            "history_public_to_subscribers": stream.history_public_to_subscribers,
        }
        # The API parameter for invite_only is named "is_private".
        if property_name == "invite_only":
            data["is_private"] = orjson.dumps(new_value).decode()
        else:
            data[property_name] = orjson.dumps(new_value).decode()
        result = self.api_patch(user, f"/api/v1/streams/{stream.id}", info=data)
        if error_msg is not None:
            self.assert_json_error(result, error_msg)
            return

        self.assert_json_success(result)
        stream.refresh_from_db()
        self.assertEqual(getattr(stream, property_name), new_value)
        # Reset to original value.
        do_change_stream_permission(
            stream,
            **old_values,
            acting_user=self.admin,
        )
    def do_test_updating_channel_privacy(self, property_name: str, new_value: bool) -> None:
        """
        Shared driver: verify which users can change the privacy property
        `property_name` of "test_stream" under several
        can_administer_channel_group configurations.
        """
        hamlet = self.example_user("hamlet")
        prospero = self.example_user("prospero")
        stream = get_stream("test_stream", self.realm)
        data = {}
        # The API parameter for invite_only is named "is_private".
        if property_name == "invite_only":
            data["is_private"] = orjson.dumps(new_value).decode()
        else:
            data[property_name] = orjson.dumps(new_value).decode()
        default_error_msg = "You do not have permission to administer this channel."
        anonymous_group_dict = UserGroupMembersData(
            direct_members=[prospero.id, self.guest.id], direct_subgroups=[]
        )
        group_permission_checks: list[PermissionCheckConfigDict] = [
            # Check admin can always administer channel.
            PermissionCheckConfigDict(
                setting_group=self.nobody_group,
                users_without_permission=[self.moderator],
                users_with_permission=[self.admin],
            ),
            # Check case when can_administer_channel_group is set to a system group.
            PermissionCheckConfigDict(
                setting_group=self.moderators_group,
                users_without_permission=[hamlet],
                users_with_permission=[self.moderator],
            ),
            # Check case when can_administer_channel_group is set to a user-defined group.
            PermissionCheckConfigDict(
                setting_group=self.hamletcharacters_group,
                users_without_permission=[self.moderator],
                users_with_permission=[hamlet],
            ),
            # Check case when can_administer_channel_group is set to an anonymous group.
            PermissionCheckConfigDict(
                setting_group=anonymous_group_dict,
                users_without_permission=[self.moderator, hamlet],
                users_with_permission=[prospero],
            ),
        ]
        for check_config in group_permission_checks:
            do_change_stream_group_based_setting(
                stream,
                "can_administer_channel_group",
                check_config["setting_group"],
                acting_user=self.admin,
            )
            for user in check_config["users_without_permission"]:
                self.check_channel_privacy_update(user, property_name, new_value, default_error_msg)
            for user in check_config["users_with_permission"]:
                self.check_channel_privacy_update(user, property_name, new_value)

        # Check guests cannot update property even when they belong
        # to "can_administer_channel_group".
        self.check_channel_privacy_update(
            self.guest, property_name, new_value, error_msg="Invalid channel ID"
        )
        self.subscribe(self.guest, stream.name)
        self.check_channel_privacy_update(self.guest, property_name, new_value, default_error_msg)
        self.unsubscribe(self.guest, stream.name)
    def test_administering_permission_for_updating_channel_privacy(self) -> None:
        """End-to-end checks for who may change a channel's privacy settings."""
        stream = self.make_stream("test_stream")
        # Give permission to create web-public channels to everyone, so
        # that we can check administering permissions easily, although
        # we do not allow members to create web-public channels
        # in production.
        do_change_realm_permission_group_setting(
            self.realm, "can_create_web_public_channel_group", self.members_group, acting_user=None
        )
        # Test making a public stream private with protected history.
        self.do_test_updating_channel_privacy("invite_only", True)
        # Test making a public stream web-public.
        self.do_test_updating_channel_privacy("is_web_public", True)
        do_change_stream_permission(
            stream,
            invite_only=True,
            history_public_to_subscribers=False,
            is_web_public=False,
            acting_user=self.admin,
        )
        # Subscribe all the users exercised by the driver, so they have
        # access to the now-private stream.
        self.subscribe(self.admin, stream.name)
        self.subscribe(self.moderator, stream.name)
        self.subscribe(self.example_user("hamlet"), stream.name)
        self.subscribe(self.example_user("prospero"), stream.name)
        # Test making a private stream with protected history public.
        self.do_test_updating_channel_privacy("invite_only", False)
def test_permission_for_updating_privacy_of_unsubscribed_private_channel(self) -> None:
hamlet = self.example_user("hamlet")
stream = self.make_stream("test_stream", invite_only=True)
do_change_stream_group_based_setting(
stream, "can_administer_channel_group", self.members_group, acting_user=self.admin
)
error_msg = "Channel content access is required."
self.check_channel_privacy_update(self.admin, "invite_only", False, error_msg)
self.check_channel_privacy_update(self.moderator, "invite_only", False, error_msg)
self.check_channel_privacy_update(hamlet, "invite_only", False, error_msg)
do_change_stream_group_based_setting(
stream, "can_add_subscribers_group", self.moderators_group, acting_user=self.admin
)
self.check_channel_privacy_update(hamlet, "invite_only", False, error_msg=error_msg)
self.check_channel_privacy_update(self.admin, "invite_only", False)
self.check_channel_privacy_update(self.moderator, "invite_only", False)
# Users who are part of can_subscribe_group get content access
# to a private stream even if they are not subscribed to it.
do_change_stream_group_based_setting(
stream, "can_subscribe_group", self.hamletcharacters_group, acting_user=self.admin
)
self.check_channel_privacy_update(hamlet, "invite_only", False)
    def check_channel_group_setting_update(
        self, user: UserProfile, property_name: str, error_msg: str | None = None
    ) -> None:
        """
        Attempt to set the group-based setting `property_name` of
        "test_stream" to the hamletcharacters group as `user`.

        If `error_msg` is given, assert the request fails with it; otherwise
        assert success, verify the setting changed, and restore the
        original value.
        """
        stream = get_stream("test_stream", user.realm)
        new_value = self.hamletcharacters_group.id
        data = {}
        data[property_name] = orjson.dumps({"new": new_value}).decode()
        old_value = getattr(stream, property_name)
        # old_value is stored as UserGroupMembersData dict if the
        # setting is set to an anonymous group and a NamedUserGroup
        # object otherwise, so that we can pass it directly to
        # do_change_stream_group_based_setting.
        if not hasattr(old_value, "named_user_group"):
            old_value = get_group_setting_value_for_api(old_value)
        else:
            old_value = old_value.named_user_group
        result = self.api_patch(user, f"/api/v1/streams/{stream.id}", info=data)
        if error_msg is not None:
            self.assert_json_error(result, error_msg)
            return

        self.assert_json_success(result)
        stream.refresh_from_db()
        # Group settings are stored as foreign keys, hence the "_id" suffix.
        self.assertEqual(getattr(stream, property_name + "_id"), new_value)
        # Reset to original value.
        do_change_stream_group_based_setting(
            stream, property_name, old_value, acting_user=self.example_user("iago")
        )
    def do_test_updating_channel_group_settings(self, property_name: str) -> None:
        """
        Shared driver: verify which users can change the group-based setting
        `property_name` of "test_stream" under several
        can_administer_channel_group configurations.
        """
        hamlet = self.example_user("hamlet")
        prospero = self.example_user("prospero")
        stream = get_stream("test_stream", self.realm)
        default_error_msg = "You do not have permission to administer this channel."
        anonymous_group_dict = UserGroupMembersData(
            direct_members=[prospero.id, self.guest.id], direct_subgroups=[]
        )
        group_permission_checks: list[PermissionCheckConfigDict] = [
            # Check admin can always administer channel.
            PermissionCheckConfigDict(
                setting_group=self.nobody_group,
                users_without_permission=[self.moderator],
                users_with_permission=[self.admin],
            ),
            # Check case when can_administer_channel_group is set to a system group.
            PermissionCheckConfigDict(
                setting_group=self.moderators_group,
                users_without_permission=[hamlet],
                users_with_permission=[self.moderator],
            ),
            # Check case when can_administer_channel_group is set to a user-defined group.
            PermissionCheckConfigDict(
                setting_group=self.hamletcharacters_group,
                users_without_permission=[self.moderator],
                users_with_permission=[hamlet],
            ),
            # Check case when can_administer_channel_group is set to an anonymous group.
            PermissionCheckConfigDict(
                setting_group=anonymous_group_dict,
                users_without_permission=[self.moderator, hamlet],
                users_with_permission=[prospero],
            ),
        ]
        for check_config in group_permission_checks:
            do_change_stream_group_based_setting(
                stream,
                "can_administer_channel_group",
                check_config["setting_group"],
                acting_user=self.admin,
            )
            for user in check_config["users_without_permission"]:
                self.check_channel_group_setting_update(user, property_name, default_error_msg)
            for user in check_config["users_with_permission"]:
                self.check_channel_group_setting_update(user, property_name)

        # Check guests cannot update property even when they belong
        # to "can_administer_channel_group".
        self.check_channel_group_setting_update(
            self.guest, property_name, error_msg="Invalid channel ID"
        )
        self.subscribe(self.guest, stream.name)
        self.check_channel_group_setting_update(self.guest, property_name, default_error_msg)
        self.unsubscribe(self.guest, stream.name)
    def do_test_updating_group_settings_for_unsubscribed_private_channels(
        self, property_name: str
    ) -> None:
        """
        Shared driver for a *private* "test_stream" with no relevant
        subscribers: verify which group-based settings additionally require
        content access, and how users can gain that access via
        can_add_subscribers_group or can_subscribe_group.
        """
        # If stream is private, test which permissions require having
        # content access to the channel.
        hamlet = self.example_user("hamlet")
        stream = get_stream("test_stream", self.realm)
        self.assertTrue(stream.invite_only)
        do_change_stream_group_based_setting(
            stream, "can_administer_channel_group", self.members_group, acting_user=self.admin
        )
        if property_name not in Stream.stream_permission_group_settings_requiring_content_access:
            # Users without content access can modify properties not in
            # stream_permission_group_settings_requiring_content_access.
            self.check_channel_group_setting_update(self.admin, property_name)
            self.check_channel_group_setting_update(self.moderator, property_name)
            self.check_channel_group_setting_update(hamlet, property_name)
            return

        error_msg = "Channel content access is required."
        # Even realm and channel admins need content access to
        # a private channel to update the permissions in
        # stream_permission_group_settings_requiring_content_access.
        self.check_channel_group_setting_update(self.admin, property_name, error_msg=error_msg)
        self.check_channel_group_setting_update(self.moderator, property_name, error_msg=error_msg)
        self.check_channel_group_setting_update(hamlet, property_name, error_msg=error_msg)

        # Users who are part of can_add_subscribers_group get content access
        # to a private stream even if they are not subscribed to it.
        do_change_stream_group_based_setting(
            stream, "can_add_subscribers_group", self.moderators_group, acting_user=self.admin
        )
        self.check_channel_group_setting_update(hamlet, property_name, error_msg=error_msg)
        self.check_channel_group_setting_update(self.admin, property_name)
        self.check_channel_group_setting_update(self.moderator, property_name)

        # Users who are part of can_subscribe_group get content access
        # to a private stream even if they are not subscribed to it.
        do_change_stream_group_based_setting(
            stream, "can_subscribe_group", self.hamletcharacters_group, acting_user=self.admin
        )
        self.check_channel_group_setting_update(hamlet, property_name)

        # Reset the setting values to "Nobody" group.
        do_change_stream_group_based_setting(
            stream, "can_add_subscribers_group", self.nobody_group, acting_user=self.admin
        )
        do_change_stream_group_based_setting(
            stream, "can_subscribe_group", self.nobody_group, acting_user=self.admin
        )
def test_administering_permission_for_updating_channel_group_settings(self) -> None:
stream = self.make_stream("test_stream")
hamlet = self.example_user("hamlet")
prospero = self.example_user("prospero")
do_change_realm_permission_group_setting(
hamlet.realm,
"can_set_delete_message_policy_group",
self.members_group,
acting_user=None,
)
for setting_name in Stream.stream_permission_group_settings:
self.do_test_updating_channel_group_settings(setting_name)
# Test changing group settings for a private stream when user is
# subscribed to the stream.
do_change_stream_permission(
stream,
invite_only=True,
history_public_to_subscribers=True,
is_web_public=False,
acting_user=self.admin,
)
for user in [self.admin, self.moderator, hamlet, prospero]:
self.subscribe(user, stream.name)
for setting_name in Stream.stream_permission_group_settings:
self.do_test_updating_channel_group_settings(setting_name)
# Unsubscribe user from private stream to test gaining
# content access from group settings.
for user in [self.admin, self.moderator, hamlet, prospero]:
self.unsubscribe(user, stream.name)
for setting_name in Stream.stream_permission_group_settings:
self.do_test_updating_group_settings_for_unsubscribed_private_channels(setting_name)
    def test_realm_permission_to_update_topics_policy(self) -> None:
        """
        Changing a channel's topics_policy is gated on the realm-level
        can_set_topics_policy_group setting (admins always pass).
        """
        hamlet = self.example_user("hamlet")
        stream = self.make_stream("test_stream")

        def check_channel_topics_policy_update(user: UserProfile, allow_fail: bool = False) -> None:
            # Attempt to set topics_policy as `user`; on success, verify the
            # change and reset the stream for the next check.
            data = {}
            data["topics_policy"] = StreamTopicsPolicyEnum.allow_empty_topic.name
            result = self.api_patch(user, f"/api/v1/streams/{stream.id}", info=data)
            if allow_fail:
                self.assert_json_error(result, "Insufficient permission")
                return

            self.assert_json_success(result)
            stream.refresh_from_db()
            self.assertEqual(stream.topics_policy, StreamTopicsPolicyEnum.allow_empty_topic.value)
            # Reset to original value
            stream.topics_policy = StreamTopicsPolicyEnum.inherit.value
            stream.save()

        do_change_stream_group_based_setting(
            stream, "can_administer_channel_group", self.nobody_group, acting_user=self.admin
        )
        do_change_realm_permission_group_setting(
            self.realm, "can_set_topics_policy_group", self.nobody_group, acting_user=None
        )
        # Admins can always update topics_policy.
        check_channel_topics_policy_update(self.admin)

        do_change_stream_group_based_setting(
            stream, "can_administer_channel_group", self.members_group, acting_user=self.admin
        )
        check_channel_topics_policy_update(self.moderator, allow_fail=True)
        check_channel_topics_policy_update(hamlet, allow_fail=True)

        # Test when can_set_topics_policy_group is set to a user-defined group.
        do_change_realm_permission_group_setting(
            self.realm, "can_set_topics_policy_group", self.hamletcharacters_group, acting_user=None
        )
        check_channel_topics_policy_update(self.admin)
        check_channel_topics_policy_update(self.moderator, allow_fail=True)
        check_channel_topics_policy_update(hamlet)

        # Test when can_set_topics_policy_group is set to an anonymous group.
        anonymous_group = self.create_or_update_anonymous_group_for_setting(
            direct_members=[hamlet], direct_subgroups=[self.moderators_group]
        )
        do_change_realm_permission_group_setting(
            self.realm, "can_set_topics_policy_group", anonymous_group, acting_user=None
        )
        check_channel_topics_policy_update(self.admin)
        check_channel_topics_policy_update(self.moderator)
        check_channel_topics_policy_update(hamlet)
    def test_can_set_delete_message_policy_group(self) -> None:
        """
        Only members of the realm's can_set_delete_message_policy_group may
        set a channel's can_delete_any_message_group /
        can_delete_own_message_group, both at channel creation and when
        updating an existing channel — even if they can administer the
        channel.
        """
        user = self.example_user("hamlet")
        iago = self.example_user("iago")
        realm = user.realm
        stream = get_stream("Verona", realm)
        owners_system_group = NamedUserGroup.objects.get(
            realm_for_sharding=realm, name=SystemGroups.OWNERS, is_system_group=True
        )
        moderators_system_group = NamedUserGroup.objects.get(
            realm_for_sharding=realm, name=SystemGroups.MODERATORS, is_system_group=True
        )
        members_system_group = NamedUserGroup.objects.get(
            realm_for_sharding=realm, name=SystemGroups.MEMBERS, is_system_group=True
        )
        do_change_realm_permission_group_setting(
            realm,
            "can_set_delete_message_policy_group",
            moderators_system_group,
            acting_user=None,
        )
        # Hamlet can administer the channel, but that alone is not enough
        # to change the delete-message groups.
        do_change_stream_group_based_setting(
            stream, "can_administer_channel_group", members_system_group, acting_user=iago
        )
        # Only moderators can change channel-level delete permissions.
        # Hamlet is not a moderator.
        subscriptions = [{"name": "new_test_stream"}]
        result = self.subscribe_via_post(
            user,
            subscriptions,
            subdomain="zulip",
            extra_post_data={
                "can_delete_any_message_group": orjson.dumps(owners_system_group.id).decode()
            },
            allow_fail=True,
        )
        self.assert_json_error(result, "Insufficient permission")
        result = self.subscribe_via_post(
            user,
            subscriptions,
            subdomain="zulip",
            extra_post_data={
                "can_delete_own_message_group": orjson.dumps(owners_system_group.id).decode()
            },
            allow_fail=True,
        )
        self.assert_json_error(result, "Insufficient permission")

        # Updating an existing channel is rejected for the same reason.
        self.login("hamlet")
        result = self.client_patch(
            f"/json/streams/{stream.id}",
            {
                "can_delete_any_message_group": orjson.dumps(
                    {
                        "new": {
                            "direct_members": [user.id],
                            "direct_subgroups": [
                                owners_system_group.id,
                                moderators_system_group.id,
                            ],
                        }
                    }
                ).decode(),
                "can_delete_own_message_group": orjson.dumps(
                    {
                        "new": {
                            "direct_members": [user.id],
                            "direct_subgroups": [
                                owners_system_group.id,
                                moderators_system_group.id,
                            ],
                        }
                    }
                ).decode(),
            },
        )
        self.assert_json_error(result, "Insufficient permission")

        moderator = self.example_user("shiva")
        # Shiva is a moderator.
        result = self.subscribe_via_post(
            moderator,
            subscriptions,
            subdomain="zulip",
            extra_post_data={
                "can_delete_any_message_group": orjson.dumps(owners_system_group.id).decode()
            },
            allow_fail=True,
        )
        self.assert_json_success(result)
        result = self.subscribe_via_post(
            moderator,
            subscriptions,
            subdomain="zulip",
            extra_post_data={
                "can_delete_own_message_group": orjson.dumps(owners_system_group.id).decode()
            },
            allow_fail=True,
        )
        self.assert_json_success(result)

        # A moderator may also update the groups on an existing channel.
        self.login("shiva")
        result = self.client_patch(
            f"/json/streams/{stream.id}",
            {
                "can_delete_any_message_group": orjson.dumps(
                    {
                        "new": {
                            "direct_members": [user.id],
                            "direct_subgroups": [
                                owners_system_group.id,
                                moderators_system_group.id,
                            ],
                        }
                    }
                ).decode(),
                "can_delete_own_message_group": orjson.dumps(
                    {
                        "new": {
                            "direct_members": [user.id],
                            "direct_subgroups": [
                                owners_system_group.id,
                                moderators_system_group.id,
                            ],
                        }
                    }
                ).decode(),
            },
        )
        self.assert_json_success(result)
| {
"repo_id": "zulip/zulip",
"file_path": "zerver/tests/test_channel_permissions.py",
"license": "Apache License 2.0",
"lines": 1298,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
zulip/zulip:zerver/tests/test_channel_creation.py | import orjson
from zerver.actions.channel_folders import check_add_channel_folder
from zerver.actions.realm_settings import (
do_change_realm_permission_group_setting,
do_change_realm_plan_type,
do_set_realm_property,
)
from zerver.actions.user_groups import check_add_user_group
from zerver.lib.default_streams import get_default_stream_ids_for_realm
from zerver.lib.exceptions import JsonableError
from zerver.lib.message import UnreadStreamInfo, aggregate_unread_data, get_raw_unread_data
from zerver.lib.streams import (
StreamDict,
create_stream_if_needed,
create_streams_if_needed,
ensure_stream,
list_to_streams,
)
from zerver.lib.test_classes import ZulipTestCase, get_topic_messages
from zerver.lib.test_helpers import reset_email_visibility_to_everyone_in_zulip_realm
from zerver.lib.types import UserGroupMembersData, UserGroupMembersDict
from zerver.models import (
Message,
NamedUserGroup,
Realm,
Recipient,
Stream,
Subscription,
UserMessage,
UserProfile,
)
from zerver.models.groups import SystemGroups
from zerver.models.realms import get_realm
from zerver.models.streams import StreamTopicsPolicyEnum, get_stream
from zerver.models.users import active_non_guest_user_ids
class TestCreateStreams(ZulipTestCase):
    def test_creating_streams(self) -> None:
        """Verify stream-creation events and idempotent bulk creation.

        Checks the audience and payload of the creation event for public
        vs. private streams, and that create_streams_if_needed creates
        missing streams while returning already-existing ones unchanged.
        """
        stream_names = ["new1", "new2", "new3"]
        stream_descriptions = ["des1", "des2", "des3"]
        realm = get_realm("zulip")
        iago = self.example_user("iago")
        # Test stream creation events.
        with self.capture_send_event_calls(expected_num_events=1) as events:
            ensure_stream(realm, "Public stream", invite_only=False, acting_user=None)
        self.assertEqual(events[0]["event"]["type"], "stream")
        self.assertEqual(events[0]["event"]["op"], "create")
        # Send public stream creation event to all active users.
        self.assertEqual(events[0]["users"], active_non_guest_user_ids(realm.id))
        self.assertEqual(events[0]["event"]["streams"][0]["name"], "Public stream")
        self.assertEqual(events[0]["event"]["streams"][0]["stream_weekly_traffic"], None)
        # Members of the groups used in the private stream's permission
        # settings below; the creation event should reach them too.
        aaron_group = check_add_user_group(
            realm, "aaron_group", [self.example_user("aaron")], acting_user=iago
        )
        prospero_group = check_add_user_group(
            realm, "prospero_group", [self.example_user("prospero")], acting_user=iago
        )
        cordelia_group = check_add_user_group(
            realm, "cordelia_group", [self.example_user("cordelia")], acting_user=iago
        )
        with self.capture_send_event_calls(expected_num_events=1) as events:
            create_stream_if_needed(
                realm,
                "Private stream",
                invite_only=True,
                can_administer_channel_group=aaron_group,
                can_add_subscribers_group=prospero_group,
                can_subscribe_group=cordelia_group,
            )
        self.assertEqual(events[0]["event"]["type"], "stream")
        self.assertEqual(events[0]["event"]["op"], "create")
        # Send private stream creation event to only realm admins.
        self.assert_length(events[0]["users"], 5)
        self.assertCountEqual(
            [
                iago.id,
                self.example_user("desdemona").id,
                self.example_user("aaron").id,
                self.example_user("prospero").id,
                self.example_user("cordelia").id,
            ],
            events[0]["users"],
        )
        self.assertEqual(events[0]["event"]["streams"][0]["name"], "Private stream")
        self.assertEqual(events[0]["event"]["streams"][0]["stream_weekly_traffic"], None)
        moderators_system_group = NamedUserGroup.objects.get(
            name="role:moderators", realm_for_sharding=realm, is_system_group=True
        )
        # First bulk call: all three streams are new.
        new_streams, existing_streams = create_streams_if_needed(
            realm,
            [
                {
                    "name": stream_name,
                    "description": stream_description,
                    "invite_only": True,
                    "message_retention_days": -1,
                    "can_remove_subscribers_group": moderators_system_group,
                }
                for (stream_name, stream_description) in zip(
                    stream_names, stream_descriptions, strict=False
                )
            ],
        )
        self.assert_length(new_streams, 3)
        self.assert_length(existing_streams, 0)
        actual_stream_names = {stream.name for stream in new_streams}
        self.assertEqual(actual_stream_names, set(stream_names))
        actual_stream_descriptions = {stream.description for stream in new_streams}
        self.assertEqual(actual_stream_descriptions, set(stream_descriptions))
        for stream in new_streams:
            self.assertTrue(stream.invite_only)
            self.assertTrue(stream.message_retention_days == -1)
            self.assertEqual(stream.can_remove_subscribers_group.id, moderators_system_group.id)
            # Streams created where acting_user is None have no creator
            self.assertIsNone(stream.creator_id)
        # Second bulk call: all three already exist, so none are created.
        new_streams, existing_streams = create_streams_if_needed(
            realm,
            [
                {"name": stream_name, "description": stream_description, "invite_only": True}
                for (stream_name, stream_description) in zip(
                    stream_names, stream_descriptions, strict=False
                )
            ],
        )
        self.assert_length(new_streams, 0)
        self.assert_length(existing_streams, 3)
        actual_stream_names = {stream.name for stream in existing_streams}
        self.assertEqual(actual_stream_names, set(stream_names))
        actual_stream_descriptions = {stream.description for stream in existing_streams}
        self.assertEqual(actual_stream_descriptions, set(stream_descriptions))
        for stream in existing_streams:
            self.assertTrue(stream.invite_only)
def test_create_api_multiline_description(self) -> None:
user = self.example_user("hamlet")
realm = user.realm
self.login_user(user)
subscriptions = [{"name": "new_stream", "description": "multi\nline\ndescription"}]
result = self.subscribe_via_post(user, subscriptions, subdomain="zulip")
self.assert_json_success(result)
stream = get_stream("new_stream", realm)
self.assertEqual(stream.description, "multi line description")
    def test_create_api_topic_permalink_description(self) -> None:
        """Topic links in a new channel's description are rendered as
        permalinks only when the creator can access the linked channel."""
        user = self.example_user("iago")
        realm = user.realm
        self.login_user(user)
        hamlet = self.example_user("hamlet")
        core_stream = self.make_stream("core", realm, True, history_public_to_subscribers=True)
        self.subscribe(hamlet, "core")
        msg_id = self.send_stream_message(hamlet, "core", topic_name="testing")
        # Test permalink not generated for description since user has no access to
        # the channel.
        subscriptions = [{"name": "stream1", "description": "#**core>testing**"}]
        result = self.subscribe_via_post(user, subscriptions, subdomain="zulip")
        self.assert_json_success(result)
        stream = get_stream("stream1", realm)
        self.assertEqual(stream.rendered_description, "<p>#<strong>core>testing</strong></p>")
        self.subscribe(user, "core")
        # Test permalink generated for the description since user now has access
        # to the channel.
        subscriptions = [{"name": "stream2", "description": "#**core>testing**"}]
        result = self.subscribe_via_post(user, subscriptions, subdomain="zulip")
        self.assert_json_success(result)
        stream = get_stream("stream2", realm)
        self.assertEqual(
            stream.rendered_description,
            f'<p><a class="stream-topic" data-stream-id="{core_stream.id}" href="/#narrow/channel/{core_stream.id}-core/topic/testing/with/{msg_id}">#{core_stream.name} > testing</a></p>',
        )
def test_history_public_to_subscribers_on_stream_creation(self) -> None:
realm = get_realm("zulip")
stream_dicts: list[StreamDict] = [
{
"name": "publicstream",
"description": "Public stream with public history",
},
{"name": "webpublicstream", "description": "Web-public stream", "is_web_public": True},
{
"name": "privatestream",
"description": "Private stream with non-public history",
"invite_only": True,
},
{
"name": "privatewithhistory",
"description": "Private stream with public history",
"invite_only": True,
"history_public_to_subscribers": True,
},
{
"name": "publictrywithouthistory",
"description": "Public stream without public history (disallowed)",
"invite_only": False,
"history_public_to_subscribers": False,
},
]
created, existing = create_streams_if_needed(realm, stream_dicts)
self.assert_length(created, 5)
self.assert_length(existing, 0)
for stream in created:
if stream.name == "publicstream":
self.assertTrue(stream.history_public_to_subscribers)
if stream.name == "webpublicstream":
self.assertTrue(stream.history_public_to_subscribers)
if stream.name == "privatestream":
self.assertFalse(stream.history_public_to_subscribers)
if stream.name == "privatewithhistory":
self.assertTrue(stream.history_public_to_subscribers)
if stream.name == "publictrywithouthistory":
self.assertTrue(stream.history_public_to_subscribers)
    def test_add_stream_as_default_on_stream_creation(self) -> None:
        """Creating a channel with is_default_stream requires admin rights
        and rejects private channels as realm defaults."""
        user_profile = self.example_user("hamlet")
        self.login_user(user_profile)
        realm = user_profile.realm
        subscriptions = [
            {"name": "default_stream", "description": "This stream is default for new users"}
        ]
        # A plain member may not create default channels.
        result = self.subscribe_via_post(
            user_profile,
            subscriptions,
            {"is_default_stream": "true"},
            allow_fail=True,
            subdomain="zulip",
        )
        self.assert_json_error(result, "Insufficient permission")
        # As an administrator the same request succeeds and the channel is
        # registered as a realm default.
        self.set_user_role(user_profile, UserProfile.ROLE_REALM_ADMINISTRATOR)
        result = self.subscribe_via_post(
            user_profile, subscriptions, {"is_default_stream": "true"}, subdomain="zulip"
        )
        self.assert_json_success(result)
        default_stream = get_stream("default_stream", realm)
        self.assertTrue(default_stream.id in get_default_stream_ids_for_realm(realm.id))
        subscriptions = [
            {
                "name": "private_default_stream",
                "description": "This stream is private and default for new users",
            }
        ]
        # Private channels can never be realm defaults, even for admins.
        result = self.subscribe_via_post(
            user_profile,
            subscriptions,
            {"is_default_stream": "true"},
            invite_only=True,
            allow_fail=True,
            subdomain="zulip",
        )
        self.assert_json_error(result, "A default channel cannot be private.")
    def test_create_stream_using_add_channel(self) -> None:
        """Cover the /channels/create endpoint: basic creation, extra
        settings (groups, folder), notification messages, duplicate names,
        and subscriber-list edge cases."""
        user_profile = self.example_user("iago")
        result = self.create_channel_via_post(user_profile, name="basketball")
        self.assert_json_success(result)
        stream = get_stream("basketball", user_profile.realm)
        self.assertEqual(stream.name, "basketball")
        cordelia = self.example_user("cordelia")
        nobody_group = NamedUserGroup.objects.get(
            name=SystemGroups.NOBODY, realm_for_sharding=cordelia.realm, is_system_group=True
        )
        channel_folder = check_add_channel_folder(
            user_profile.realm, "sports", "", acting_user=user_profile
        )
        # Creation with a description, an explicit administration group, and
        # a folder assignment.
        result = self.create_channel_via_post(
            user_profile,
            name="testchannel",
            extra_post_data=dict(
                description="test channel",
                can_administer_channel_group=orjson.dumps(
                    {
                        "direct_members": [cordelia.id],
                        "direct_subgroups": [nobody_group.id],
                    }
                ).decode(),
                folder_id=orjson.dumps(channel_folder.id).decode(),
            ),
        )
        self.assert_json_success(result)
        stream = get_stream("testchannel", user_profile.realm)
        self.assertEqual(stream.name, "testchannel")
        self.assertEqual(stream.description, "test channel")
        # Confirm channel created notification message in channel events topic.
        message = self.get_last_message()
        self.assertEqual(message.recipient.type, Recipient.STREAM)
        self.assertEqual(message.recipient.type_id, stream.id)
        self.assertEqual(message.topic_name(), Realm.STREAM_EVENTS_NOTIFICATION_TOPIC_NAME)
        self.assertEqual(message.sender_id, self.notification_bot(user_profile.realm).id)
        expected_message_content = (
            f"**Public** channel created by @_**{user_profile.full_name}|{user_profile.id}**. **Description:**\n"
            "```` quote\ntest channel\n````"
        )
        self.assertEqual(message.content, expected_message_content)
        # Test channel created notification is not sent if `send_channel_events_messages`
        # realm setting is `False`.
        do_set_realm_property(stream.realm, "send_channel_events_messages", False, acting_user=None)
        result = self.create_channel_via_post(
            user_profile,
            name="testchannel2",
        )
        self.assert_json_success(result)
        stream = get_stream("testchannel2", user_profile.realm)
        self.assertEqual(stream.name, "testchannel2")
        with self.assertRaises(Message.DoesNotExist):
            Message.objects.get(recipient__type_id=stream.id)
        # Creating an existing channel should return an error.
        result = self.create_channel_via_post(user_profile, name="basketball")
        self.assert_json_error(result, "Channel 'basketball' already exists", status_code=409)
        # Test creating channel with no subscribers
        post_data = {
            "name": "no-sub-channel",
            "subscribers": orjson.dumps([]).decode(),
        }
        result = self.api_post(
            user_profile,
            "/api/v1/channels/create",
            post_data,
        )
        self.assert_json_success(result)
        stream = get_stream("no-sub-channel", user_profile.realm)
        self.assertEqual(stream.name, "no-sub-channel")
        self.assertEqual(stream.subscriber_count, 0)
        # Test creating channel with invalid user ID.
        result = self.create_channel_via_post(
            user_profile,
            name="invalid-user-channel",
            subscribers=[12, 1000],
        )
        self.assert_json_error(result, "No such user")
    def test_channel_creation_miscellaneous(self) -> None:
        """Assorted permission checks on channel creation.

        Covers message_retention_days (organization owners only),
        default-channel creation, web-public channel availability, guest
        users, and the topics_policy permission.
        """
        iago = self.example_user("iago")
        desdemona = self.example_user("desdemona")
        cordelia = self.example_user("cordelia")
        # Only organization owners may set message_retention_days; Iago is
        # an admin, Desdemona an owner.
        result = self.create_channel_via_post(
            iago, extra_post_data={"message_retention_days": orjson.dumps(10).decode()}
        )
        self.assert_json_error(result, "Must be an organization owner")
        result = self.create_channel_via_post(
            desdemona,
            [iago.id],
            name="new_channel",
            extra_post_data={"message_retention_days": orjson.dumps(10).decode()},
        )
        self.assert_json_success(result)
        stream = get_stream("new_channel", desdemona.realm)
        self.assertEqual(stream.name, "new_channel")
        self.assertEqual(stream.message_retention_days, 10)
        # Default streams can only be created by admins
        result = self.create_channel_via_post(
            iago,
            name="testing_channel1",
            extra_post_data={"is_default_stream": orjson.dumps(True).decode()},
            invite_only=True,
        )
        self.assert_json_error(result, "A default channel cannot be private.")
        result = self.create_channel_via_post(
            iago,
            name="testing_channel1",
            extra_post_data={"is_default_stream": orjson.dumps(True).decode()},
            invite_only=False,
        )
        self.assert_json_success(result)
        stream = get_stream("testing_channel1", iago.realm)
        self.assertEqual(stream.name, "testing_channel1")
        self.assertTrue(stream.id in get_default_stream_ids_for_realm(iago.realm.id))
        # Only org owners can create web public streams by default, if they are enabled.
        with self.settings(WEB_PUBLIC_STREAMS_ENABLED=False):
            self.assertFalse(desdemona.realm.has_web_public_streams())
            result = self.create_channel_via_post(
                desdemona,
                name="testing_web_public_channel",
                is_web_public=True,
            )
            self.assert_json_error(result, "Web-public channels are not enabled.")
        with self.settings(WEB_PUBLIC_STREAMS_ENABLED=True):
            self.assertTrue(desdemona.realm.has_web_public_streams())
            result = self.create_channel_via_post(
                desdemona,
                name="testing_web_public_channel",
                is_web_public=True,
            )
            self.assert_json_success(result)
            stream = get_stream("testing_web_public_channel", desdemona.realm)
            self.assertEqual(stream.name, "testing_web_public_channel")
        # Guests may not create channels at all.
        polonius = self.example_user("polonius")
        result = self.create_channel_via_post(
            polonius,
            name="testing_channel4",
            invite_only=True,
        )
        self.assert_json_error(result, "Not allowed for guest users")
        # topics policy
        owners = NamedUserGroup.objects.get(
            name=SystemGroups.OWNERS, realm_for_sharding=cordelia.realm, is_system_group=True
        )
        do_change_realm_permission_group_setting(
            cordelia.realm, "can_set_topics_policy_group", owners, acting_user=None
        )
        self.assertTrue(desdemona.can_set_topics_policy())
        self.assertFalse(cordelia.can_set_topics_policy())
        result = self.create_channel_via_post(
            cordelia,
            name="testing_channel4",
            extra_post_data={
                "topics_policy": orjson.dumps(
                    StreamTopicsPolicyEnum.disable_empty_topic.name
                ).decode()
            },
        )
        self.assert_json_error(result, "Insufficient permission")
        result = self.create_channel_via_post(
            desdemona,
            name="testing_channel4",
            extra_post_data={
                "topics_policy": orjson.dumps(
                    StreamTopicsPolicyEnum.disable_empty_topic.name
                ).decode()
            },
        )
        self.assert_json_success(result)
        stream = get_stream("testing_channel4", desdemona.realm)
        self.assertEqual(stream.name, "testing_channel4")
        self.assertEqual(stream.topics_policy, StreamTopicsPolicyEnum.disable_empty_topic.value)
    def _test_group_based_settings_for_creating_channels(
        self,
        stream_policy: str,
        *,
        invite_only: bool,
        is_web_public: bool,
    ) -> None:
        """Shared checks for a realm-level channel-creation permission group.

        `stream_policy` names the realm setting under test (e.g.
        "can_create_public_channel_group"); the keyword flags select the
        kind of channel to create.  Exercises system groups, a user-defined
        group, and an anonymous (direct members + subgroups) value.
        """
        def check_permission_to_create_channel(
            user: UserProfile, stream_name: str, *, expect_fail: bool = False
        ) -> None:
            """Attempt creation as `user`; assert the permission error or
            that the channel now exists."""
            result = self.create_channel_via_post(
                user,
                name=stream_name,
                invite_only=invite_only,
                is_web_public=is_web_public,
            )
            if expect_fail:
                self.assert_json_error(result, "Insufficient permission")
                return
            self.assert_json_success(result)
            self.assertTrue(
                Stream.objects.filter(name=stream_name, realm_id=user.realm.id).exists()
            )
        cordelia = self.example_user("cordelia")
        iago = self.example_user("iago")
        desdemona = self.example_user("desdemona")
        # System groups case
        nobody_group = NamedUserGroup.objects.get(
            name=SystemGroups.NOBODY, realm_for_sharding=cordelia.realm, is_system_group=True
        )
        do_change_realm_permission_group_setting(
            cordelia.realm, stream_policy, nobody_group, acting_user=None
        )
        # With role:nobody, even an administrator (Iago) cannot create.
        check_permission_to_create_channel(
            cordelia,
            "testing_channel_group_permission1",
            expect_fail=True,
        )
        check_permission_to_create_channel(
            iago, "testing_channel_group_permission1", expect_fail=True
        )
        member_group = NamedUserGroup.objects.get(
            name=SystemGroups.MEMBERS, realm_for_sharding=cordelia.realm, is_system_group=True
        )
        do_change_realm_permission_group_setting(
            cordelia.realm, stream_policy, member_group, acting_user=None
        )
        check_permission_to_create_channel(
            cordelia,
            "testing_channel_group_permission1",
        )
        check_permission_to_create_channel(iago, "testing_channel_group_permission2")
        admin_group = NamedUserGroup.objects.get(
            name=SystemGroups.ADMINISTRATORS,
            realm_for_sharding=cordelia.realm,
            is_system_group=True,
        )
        do_change_realm_permission_group_setting(
            cordelia.realm, stream_policy, admin_group, acting_user=None
        )
        check_permission_to_create_channel(
            cordelia,
            "testing_channel_group_permission3",
            expect_fail=True,
        )
        check_permission_to_create_channel(iago, "testing_channel_group_permission3")
        # User defined group case
        leadership_group = check_add_user_group(
            cordelia.realm, "Leadership", [desdemona], acting_user=desdemona
        )
        do_change_realm_permission_group_setting(
            cordelia.realm, stream_policy, leadership_group, acting_user=None
        )
        check_permission_to_create_channel(
            cordelia,
            "testing_channel_group_permission4",
            expect_fail=True,
        )
        check_permission_to_create_channel(
            desdemona,
            "testing_channel_group_permission4",
        )
        # Anonymous group case
        staff_group = check_add_user_group(cordelia.realm, "Staff", [iago], acting_user=iago)
        setting_group = self.create_or_update_anonymous_group_for_setting([cordelia], [staff_group])
        do_change_realm_permission_group_setting(
            cordelia.realm, stream_policy, setting_group, acting_user=None
        )
        # Iago qualifies via the Staff subgroup; Cordelia as a direct member.
        check_permission_to_create_channel(
            desdemona,
            "testing_channel_group_permission5",
            expect_fail=True,
        )
        check_permission_to_create_channel(iago, "testing_channel_group_permission5")
        check_permission_to_create_channel(
            cordelia,
            "testing_channel_group_permission6",
        )
def test_group_based_permissions_for_creating_private_streams(self) -> None:
self._test_group_based_settings_for_creating_channels(
"can_create_private_channel_group",
invite_only=True,
is_web_public=False,
)
def test_group_based_permissions_for_creating_public_streams(self) -> None:
self._test_group_based_settings_for_creating_channels(
"can_create_public_channel_group",
invite_only=False,
is_web_public=False,
)
def test_group_based_permissions_for_creating_web_public_streams(self) -> None:
self._test_group_based_settings_for_creating_channels(
"can_create_web_public_channel_group",
invite_only=False,
is_web_public=True,
)
    def test_auto_mark_stream_created_message_as_read_for_stream_creator(self) -> None:
        """The "new stream" notification messages are marked as read for
        the stream's creator but stay unread for other subscribers."""
        # This test relies on email == delivery_email for
        # convenience.
        reset_email_visibility_to_everyone_in_zulip_realm()
        realm = Realm.objects.get(name="Zulip Dev")
        iago = self.example_user("iago")
        hamlet = self.example_user("hamlet")
        cordelia = self.example_user("cordelia")
        aaron = self.example_user("aaron")
        # Establish a stream for notifications.
        announce_stream = ensure_stream(
            realm, "announce", False, "announcements here.", acting_user=None
        )
        realm.new_stream_announcements_stream_id = announce_stream.id
        realm.save(update_fields=["new_stream_announcements_stream_id"])
        self.subscribe(iago, announce_stream.name)
        self.subscribe(hamlet, announce_stream.name)
        self.login_user(iago)
        initial_message_count = Message.objects.count()
        initial_usermessage_count = UserMessage.objects.count()
        data = {
            "subscriptions": '[{"name":"brand new stream","description":""}]',
            "history_public_to_subscribers": "true",
            "invite_only": "false",
            "announce": "true",
            "principals": orjson.dumps([iago.id, aaron.id, cordelia.id, hamlet.id]).decode(),
        }
        response = self.client_post("/json/users/me/subscriptions", data)
        final_message_count = Message.objects.count()
        final_usermessage_count = UserMessage.objects.count()
        # NOTE(review): the keys below are hard-coded user ids for iago,
        # aaron, cordelia and hamlet in the test database.
        expected_response = {
            "result": "success",
            "msg": "",
            "subscribed": {
                "10": ["brand new stream"],
                "11": ["brand new stream"],
                "6": ["brand new stream"],
                "8": ["brand new stream"],
            },
            "already_subscribed": {},
            "new_subscription_messages_sent": True,
        }
        self.assertEqual(response.status_code, 200)
        self.assertEqual(orjson.loads(response.content), expected_response)
        # 2 messages should be created, one in announce and one in the new stream itself.
        self.assertEqual(final_message_count - initial_message_count, 2)
        # 4 UserMessages per subscriber: One for each of the subscribers, plus 1 for
        # each user in the notifications stream.
        announce_stream_subs = Subscription.objects.filter(recipient=announce_stream.recipient)
        self.assertEqual(
            final_usermessage_count - initial_usermessage_count, 4 + announce_stream_subs.count()
        )
        def get_unread_stream_data(user: UserProfile) -> list[UnreadStreamInfo]:
            """Return the per-stream aggregated unread info for `user`."""
            raw_unread_data = get_raw_unread_data(user)
            aggregated_data = aggregate_unread_data(raw_unread_data, allow_empty_topic_name=True)
            return aggregated_data["streams"]
        stream_id = Stream.objects.get(name="brand new stream").id
        iago_unread_messages = get_unread_stream_data(iago)
        hamlet_unread_messages = get_unread_stream_data(hamlet)
        # The stream creation messages should be unread for Hamlet
        self.assert_length(hamlet_unread_messages, 2)
        # According to the code in zerver/views/streams/add_subscriptions_backend
        # the notification stream message is sent first, then the new stream's message.
        self.assertEqual(hamlet_unread_messages[1]["stream_id"], stream_id)
        # But it should be marked as read for Iago, the stream creator.
        self.assert_length(iago_unread_messages, 0)
def test_can_administer_channel_group_default_on_stream_creation(self) -> None:
user = self.example_user("hamlet")
realm = user.realm
self.login_user(user)
nobody_system_group = NamedUserGroup.objects.get(
name="role:nobody", realm_for_sharding=realm, is_system_group=True
)
stream, _created = create_stream_if_needed(
realm, "new stream without acting user", invite_only=True
)
self.assertEqual(stream.can_administer_channel_group.id, nobody_system_group.id)
stream, _created = create_stream_if_needed(
realm, "new stream with acting user", acting_user=user
)
self.assertCountEqual(stream.can_administer_channel_group.direct_members.all(), [user])
    def test_can_create_topic_group_for_protected_history_streams(self) -> None:
        """
        For channels with protected history, can_create_topic_group can only
        be set to "role:everyone" system group.

        Both creation endpoints (/users/me/subscriptions and
        /channels/create) are checked for each candidate group value.
        """
        user = self.example_user("iago")
        realm = user.realm
        self.login_user(user)
        everyone_system_group = NamedUserGroup.objects.get(
            name=SystemGroups.EVERYONE, realm=realm, is_system_group=True
        )
        moderators_system_group = NamedUserGroup.objects.get(
            name=SystemGroups.MODERATORS, realm=realm, is_system_group=True
        )
        hamletcharacters_group = NamedUserGroup.objects.get(name="hamletcharacters", realm=realm)
        error_msg = "Unsupported parameter combination: history_public_to_subscribers, can_create_topic_group"
        def check_create_protected_history_stream(
            can_create_topic_group: int | UserGroupMembersData,
            expect_fail: bool = False,
        ) -> None:
            """Create a protected-history private channel with the given
            can_create_topic_group value via both endpoints, asserting the
            expected success or the parameter-combination error."""
            stream_name = "test_protected_history_stream"
            subscriptions = [{"name": stream_name}]
            extra_post_data = {
                "history_public_to_subscribers": orjson.dumps(False).decode(),
                "can_create_topic_group": orjson.dumps(can_create_topic_group).decode(),
            }
            result = self.subscribe_via_post(
                user,
                subscriptions,
                extra_post_data,
                invite_only=True,
                subdomain="zulip",
                allow_fail=expect_fail,
            )
            if expect_fail:
                self.assert_json_error(result, error_msg)
            else:
                self.assert_json_success(result)
                stream = get_stream(stream_name, realm)
                self.assertFalse(stream.history_public_to_subscribers)
                self.assertEqual(stream.can_create_topic_group_id, everyone_system_group.id)
                # Delete the created stream so that we can create stream
                # with same name for further cases.
                stream.delete()
            # Test creating channel using "/channels/create" endpoint as well.
            result = self.create_channel_via_post(
                user,
                name=stream_name,
                extra_post_data=extra_post_data,
                invite_only=True,
            )
            if expect_fail:
                self.assert_json_error(result, error_msg)
                return
            self.assert_json_success(result)
            stream = get_stream(stream_name, realm)
            self.assertFalse(stream.history_public_to_subscribers)
            self.assertEqual(stream.can_create_topic_group_id, everyone_system_group.id)
            # Delete the created stream so that we can create stream
            # with same name for further cases.
            stream.delete()
        # Testing for everyone group.
        check_create_protected_history_stream(everyone_system_group.id)
        # Testing for a system group.
        check_create_protected_history_stream(moderators_system_group.id, expect_fail=True)
        # Testing for a user defined group.
        check_create_protected_history_stream(hamletcharacters_group.id, expect_fail=True)
        # Testing for an anonymous group.
        check_create_protected_history_stream(
            UserGroupMembersData(
                direct_members=[user.id], direct_subgroups=[moderators_system_group.id]
            ),
            expect_fail=True,
        )
        # Testing for an anonymous group without members and
        # only everyone group as subgroup.
        check_create_protected_history_stream(
            UserGroupMembersData(direct_members=[], direct_subgroups=[everyone_system_group.id]),
        )
    def do_test_permission_setting_on_stream_creation(self, setting_name: str) -> None:
        """Exercise one group-based permission setting at channel creation.

        Repeatedly creates (and deletes) a channel named "new_stream",
        passing different values for `setting_name`: a system group id,
        nothing (falls back to the setting's configured default), a
        user-defined group, an anonymous members/subgroups dict, an empty
        anonymous dict (resolves to role:nobody), role:owners,
        role:nobody, role:everyone (allowed only when the setting's config
        permits it) and role:internet (always rejected).
        """
        user = self.example_user("hamlet")
        realm = user.realm
        self.login_user(user)
        moderators_system_group = NamedUserGroup.objects.get(
            name="role:moderators", realm_for_sharding=realm, is_system_group=True
        )
        permission_config = Stream.stream_permission_group_settings[setting_name]
        # Setting the value to a system group id.
        subscriptions = [{"name": "new_stream", "description": "New stream"}]
        extra_post_data = {}
        extra_post_data[setting_name] = orjson.dumps(moderators_system_group.id).decode()
        result = self.subscribe_via_post(
            user,
            subscriptions,
            extra_post_data,
            subdomain="zulip",
        )
        self.assert_json_success(result)
        stream = get_stream("new_stream", realm)
        self.assertEqual(getattr(stream, setting_name).id, moderators_system_group.id)
        # Delete the created stream, so we can create a new one for
        # testing another setting value.
        stream.delete()
        # Omitting the setting: the configured default applies.
        subscriptions = [{"name": "new_stream", "description": "New stream"}]
        result = self.subscribe_via_post(user, subscriptions, subdomain="zulip")
        self.assert_json_success(result)
        stream = get_stream("new_stream", realm)
        if permission_config.default_group_name == "channel_creator":
            # "channel_creator" defaults resolve to an anonymous group
            # containing just the acting user.
            self.assertEqual(list(getattr(stream, setting_name).direct_members.all()), [user])
            self.assertEqual(
                list(getattr(stream, setting_name).direct_subgroups.all()),
                [],
            )
        else:
            default_group = NamedUserGroup.objects.get(
                name=permission_config.default_group_name,
                realm_for_sharding=realm,
                is_system_group=True,
            )
            self.assertEqual(getattr(stream, setting_name).id, default_group.id)
        # Delete the created stream, so we can create a new one for
        # testing another setting value.
        stream.delete()
        # Setting the value to a user-defined group id.
        hamletcharacters_group = NamedUserGroup.objects.get(
            name="hamletcharacters", realm_for_sharding=realm
        )
        subscriptions = [{"name": "new_stream", "description": "New stream"}]
        extra_post_data[setting_name] = orjson.dumps(hamletcharacters_group.id).decode()
        result = self.subscribe_via_post(
            user,
            subscriptions,
            extra_post_data,
            allow_fail=True,
            subdomain="zulip",
        )
        self.assert_json_success(result)
        stream = get_stream("new_stream", realm)
        self.assertEqual(getattr(stream, setting_name).id, hamletcharacters_group.id)
        # Delete the created stream, so we can create a new one for
        # testing another setting value.
        stream.delete()
        # Anonymous group: explicit direct members and subgroups.
        subscriptions = [{"name": "new_stream", "description": "New stream"}]
        extra_post_data[setting_name] = orjson.dumps(
            {"direct_members": [user.id], "direct_subgroups": [moderators_system_group.id]}
        ).decode()
        result = self.subscribe_via_post(
            user,
            subscriptions,
            extra_post_data,
            allow_fail=True,
            subdomain="zulip",
        )
        self.assert_json_success(result)
        stream = get_stream("new_stream", realm)
        self.assertEqual(list(getattr(stream, setting_name).direct_members.all()), [user])
        self.assertEqual(
            list(getattr(stream, setting_name).direct_subgroups.all()),
            [moderators_system_group],
        )
        # Delete the created stream, so we can create a new one for
        # testing another setting value.
        stream.delete()
        # Empty anonymous group resolves to role:nobody.
        nobody_group = NamedUserGroup.objects.get(
            name="role:nobody", is_system_group=True, realm_for_sharding=realm
        )
        subscriptions = [{"name": "new_stream", "description": "New stream"}]
        extra_post_data[setting_name] = orjson.dumps(
            {"direct_members": [], "direct_subgroups": []}
        ).decode()
        result = self.subscribe_via_post(
            user,
            subscriptions,
            extra_post_data,
            allow_fail=True,
            subdomain="zulip",
        )
        self.assert_json_success(result)
        stream = get_stream("new_stream", realm)
        self.assertEqual(getattr(stream, setting_name).id, nobody_group.id)
        # Delete the created stream, so we can create a new one for
        # testing another setting value.
        stream.delete()
        # role:owners is accepted for any setting.
        subscriptions = [{"name": "new_stream", "description": "New stream"}]
        owners_group = NamedUserGroup.objects.get(
            name="role:owners", is_system_group=True, realm_for_sharding=realm
        )
        extra_post_data[setting_name] = orjson.dumps(owners_group.id).decode()
        result = self.subscribe_via_post(
            user,
            subscriptions,
            extra_post_data,
            allow_fail=True,
            subdomain="zulip",
        )
        self.assert_json_success(result)
        stream = get_stream("new_stream", realm)
        self.assertEqual(getattr(stream, setting_name).id, owners_group.id)
        # Delete the created stream, so we can create a new one for
        # testing another setting value.
        stream.delete()
        # role:nobody is accepted for any setting.
        subscriptions = [{"name": "new_stream", "description": "New stream"}]
        extra_post_data[setting_name] = orjson.dumps(nobody_group.id).decode()
        result = self.subscribe_via_post(
            user,
            subscriptions,
            extra_post_data,
            allow_fail=True,
            subdomain="zulip",
        )
        self.assert_json_success(result)
        stream = get_stream("new_stream", realm)
        self.assertEqual(getattr(stream, setting_name).id, nobody_group.id)
        # Delete the created stream, so we can create a new one for
        # testing another setting value.
        stream.delete()
        # role:everyone is accepted only when the setting's configuration
        # allows it.
        subscriptions = [{"name": "new_stream", "description": "New stream"}]
        everyone_group = NamedUserGroup.objects.get(
            name="role:everyone", is_system_group=True, realm_for_sharding=realm
        )
        extra_post_data[setting_name] = orjson.dumps(everyone_group.id).decode()
        result = self.subscribe_via_post(
            user,
            subscriptions,
            extra_post_data,
            allow_fail=True,
            subdomain="zulip",
        )
        if permission_config.allow_everyone_group:
            self.assert_json_success(result)
            stream = get_stream("new_stream", realm)
            self.assertEqual(getattr(stream, setting_name).id, everyone_group.id)
            # Delete the created stream, so we can create a new one for
            # testing another setting value.
            stream.delete()
        else:
            self.assert_json_error(
                result,
                f"'{setting_name}' setting cannot be set to 'role:everyone' group.",
            )
        # role:internet is never an acceptable value.
        subscriptions = [{"name": "new_stream", "description": "New stream"}]
        internet_group = NamedUserGroup.objects.get(
            name="role:internet", is_system_group=True, realm_for_sharding=realm
        )
        extra_post_data[setting_name] = orjson.dumps(internet_group.id).decode()
        result = self.subscribe_via_post(
            user,
            subscriptions,
            extra_post_data,
            allow_fail=True,
            subdomain="zulip",
        )
        self.assert_json_error(
            result,
            f"'{setting_name}' setting cannot be set to 'role:internet' group.",
        )
def test_permission_settings_on_stream_creation(self) -> None:
realm = get_realm("zulip")
members_system_group = NamedUserGroup.objects.get(
name=SystemGroups.MEMBERS, realm_for_sharding=realm, is_system_group=True
)
do_change_realm_permission_group_setting(
realm,
"can_set_delete_message_policy_group",
members_system_group,
acting_user=None,
)
for setting_name in Stream.stream_permission_group_settings:
self.do_test_permission_setting_on_stream_creation(setting_name)
    def test_default_permission_settings_on_stream_creation(self) -> None:
        """Check default group values for channel permission settings and
        that the creation event reports the same values."""
        hamlet = self.example_user("hamlet")
        realm = hamlet.realm
        subscriptions = [{"name": "new_stream", "description": "New stream"}]
        self.login("hamlet")
        with self.capture_send_event_calls(expected_num_events=4) as events:
            result = self.subscribe_via_post(
                hamlet,
                subscriptions,
            )
        self.assert_json_success(result)
        nobody_group = NamedUserGroup.objects.get(
            name=SystemGroups.NOBODY, realm_for_sharding=realm, is_system_group=True
        )
        admins_group = NamedUserGroup.objects.get(
            name=SystemGroups.ADMINISTRATORS, realm_for_sharding=realm, is_system_group=True
        )
        everyone_group = NamedUserGroup.objects.get(
            name=SystemGroups.EVERYONE, realm_for_sharding=realm, is_system_group=True
        )
        stream = get_stream("new_stream", realm)
        # The administration group defaults to just the creator.
        self.assertEqual(
            list(
                stream.can_administer_channel_group.direct_members.all().values_list(
                    "id", flat=True
                )
            ),
            [hamlet.id],
        )
        self.assertEqual(
            list(
                stream.can_administer_channel_group.direct_subgroups.all().values_list(
                    "id", flat=True
                )
            ),
            [],
        )
        self.assertEqual(stream.can_add_subscribers_group_id, nobody_group.id)
        self.assertEqual(stream.can_remove_subscribers_group_id, admins_group.id)
        self.assertEqual(stream.can_send_message_group_id, everyone_group.id)
        self.assertEqual(stream.can_subscribe_group_id, nobody_group.id)
        # Check setting values sent in stream creation events.
        event_stream = events[0]["event"]["streams"][0]
        self.assertEqual(
            event_stream["can_administer_channel_group"],
            UserGroupMembersDict(direct_members=[hamlet.id], direct_subgroups=[]),
        )
        self.assertEqual(event_stream["can_add_subscribers_group"], nobody_group.id)
        self.assertEqual(event_stream["can_remove_subscribers_group"], admins_group.id)
        self.assertEqual(event_stream["can_send_message_group"], everyone_group.id)
        self.assertEqual(event_stream["can_subscribe_group"], nobody_group.id)
def test_acting_user_is_creator(self) -> None:
"""
If backend calls provide an acting_user while trying to
create streams, assign acting_user as the stream creator
"""
hamlet = self.example_user("hamlet")
new_streams, _ = create_streams_if_needed(
hamlet.realm,
[
StreamDict(
name="hamlet's test stream",
description="No description",
invite_only=True,
is_web_public=True,
)
],
acting_user=hamlet,
)
created_stream = new_streams[0]
self.assertEqual(created_stream.creator_id, hamlet.id)
def test_channel_create_message_exists_for_all_policy_types(self) -> None:
"""
Create a channel for each policy type to ensure they all have a "new channel" message.
"""
# this is to check if the appropriate channel name is present in the "new channel" message
policy_key_map: dict[str, str] = {
"web_public": "**Web-public**",
"public": "**Public**",
"private_shared_history": "**Private, shared history**",
"private_protected_history": "**Private, protected history**",
}
for policy_key, policy_dict in Stream.PERMISSION_POLICIES.items():
channel_creator = self.example_user("desdemona")
subdomain = "zulip"
new_channel_name = f"New {policy_key} channel"
result = self.api_post(
channel_creator,
"/api/v1/users/me/subscriptions",
{
"subscriptions": orjson.dumps([{"name": new_channel_name}]).decode(),
"is_web_public": orjson.dumps(policy_dict["is_web_public"]).decode(),
"invite_only": orjson.dumps(policy_dict["invite_only"]).decode(),
"history_public_to_subscribers": orjson.dumps(
policy_dict["history_public_to_subscribers"]
).decode(),
},
subdomain=subdomain,
)
self.assert_json_success(result)
new_channel = get_stream(new_channel_name, channel_creator.realm)
channel_events_messages = get_topic_messages(
channel_creator, new_channel, "channel events"
)
self.assert_length(channel_events_messages, 1)
self.assertIn(policy_key_map[policy_key], channel_events_messages[0].content)
    def test_adding_channels_to_folder_during_creation(self) -> None:
        """Verify the optional folder_id parameter when creating channels:
        an invalid ID is rejected, a valid ID assigns all created channels
        to that folder, and omitting it leaves the folder unset.
        """
        realm = get_realm("zulip")
        iago = self.example_user("iago")
        hamlet = self.example_user("hamlet")
        channel_folder = check_add_channel_folder(realm, "Backend", "", acting_user=iago)
        subscriptions = [
            {"name": "new_stream", "description": "New stream"},
            {"name": "new_stream_2", "description": "New stream 2"},
        ]
        # A folder_id that does not exist should fail the whole request.
        extra_post_data = {}
        extra_post_data["folder_id"] = orjson.dumps(99).decode()
        result = self.subscribe_via_post(
            hamlet,
            subscriptions,
            extra_post_data,
            allow_fail=True,
            subdomain="zulip",
        )
        self.assert_json_error(result, "Invalid channel folder ID")
        # With a valid folder_id, every channel in the request is placed
        # in that folder.
        extra_post_data["folder_id"] = orjson.dumps(channel_folder.id).decode()
        result = self.subscribe_via_post(
            hamlet,
            subscriptions,
            extra_post_data,
            subdomain="zulip",
        )
        stream = get_stream("new_stream", realm)
        self.assertEqual(stream.folder, channel_folder)
        stream = get_stream("new_stream_2", realm)
        self.assertEqual(stream.folder, channel_folder)
        # Without folder_id, newly created channels have no folder.
        subscriptions = [
            {"name": "new_stream_3", "description": "New stream 3"},
            {"name": "new_stream_4", "description": "New stream 4"},
        ]
        extra_post_data = {}
        result = self.subscribe_via_post(
            hamlet,
            subscriptions,
            extra_post_data,
            subdomain="zulip",
        )
        stream = get_stream("new_stream_3", realm)
        self.assertIsNone(stream.folder)
        stream = get_stream("new_stream_4", realm)
        self.assertIsNone(stream.folder)
    def test_stream_message_retention_days_on_stream_creation(self) -> None:
        """
        Only admins can create streams with message_retention_days
        with value other than None.
        """
        admin = self.example_user("iago")
        # A (non-owner) admin may not set a finite retention value...
        streams_raw: list[StreamDict] = [
            {
                "name": "new_stream",
                "message_retention_days": 10,
                "is_web_public": False,
            }
        ]
        request_settings_dict = dict.fromkeys(Stream.stream_permission_group_settings)
        with self.assertRaisesRegex(JsonableError, "Must be an organization owner"):
            list_to_streams(
                streams_raw, admin, autocreate=True, request_settings_dict=request_settings_dict
            )
        # ...nor the special "retain forever" value (-1).
        streams_raw = [
            {
                "name": "new_stream",
                "message_retention_days": -1,
                "is_web_public": False,
            }
        ]
        with self.assertRaisesRegex(JsonableError, "Must be an organization owner"):
            list_to_streams(
                streams_raw, admin, autocreate=True, request_settings_dict=request_settings_dict
            )
        # An explicit None (realm default) is allowed for admins.
        streams_raw = [
            {
                "name": "new_stream",
                "message_retention_days": None,
                "is_web_public": False,
            }
        ]
        result = list_to_streams(
            streams_raw, admin, autocreate=True, request_settings_dict=request_settings_dict
        )
        # list_to_streams returns (existing_streams, created_streams).
        self.assert_length(result[0], 0)
        self.assert_length(result[1], 1)
        self.assertEqual(result[1][0].name, "new_stream")
        self.assertEqual(result[1][0].message_retention_days, None)
        owner = self.example_user("desdemona")
        realm = owner.realm
        streams_raw = [
            {
                "name": "new_stream1",
                "message_retention_days": 10,
                "is_web_public": False,
            },
            {
                "name": "new_stream2",
                "message_retention_days": -1,
                "is_web_public": False,
            },
            {
                "name": "new_stream3",
                "is_web_public": False,
            },
        ]
        # Even owners cannot customize retention on the LIMITED plan.
        do_change_realm_plan_type(realm, Realm.PLAN_TYPE_LIMITED, acting_user=admin)
        with self.assertRaisesRegex(
            JsonableError, "Available on Zulip Cloud Standard. Upgrade to access."
        ):
            list_to_streams(
                streams_raw, owner, autocreate=True, request_settings_dict=request_settings_dict
            )
        # On a self-hosted plan, the owner can set all retention variants.
        do_change_realm_plan_type(realm, Realm.PLAN_TYPE_SELF_HOSTED, acting_user=admin)
        result = list_to_streams(
            streams_raw, owner, autocreate=True, request_settings_dict=request_settings_dict
        )
        self.assert_length(result[0], 0)
        self.assert_length(result[1], 3)
        self.assertEqual(result[1][0].name, "new_stream1")
        self.assertEqual(result[1][0].message_retention_days, 10)
        self.assertEqual(result[1][1].name, "new_stream2")
        self.assertEqual(result[1][1].message_retention_days, -1)
        self.assertEqual(result[1][2].name, "new_stream3")
        self.assertEqual(result[1][2].message_retention_days, None)
    def test_permission_settings_when_creating_multiple_streams(self) -> None:
        """
        Check that different anonymous group is used for each setting when creating
        multiple streams in a single request.
        """
        realm = get_realm("zulip")
        hamlet = self.example_user("hamlet")
        cordelia = self.example_user("cordelia")
        moderators_group = NamedUserGroup.objects.get(
            name=SystemGroups.MODERATORS, realm_for_sharding=realm, is_system_group=True
        )
        subscriptions = [
            {"name": "new_stream", "description": "New stream"},
            {"name": "new_stream_2", "description": "New stream 2"},
        ]
        # Request an anonymous (members + subgroups) group for
        # can_add_subscribers_group on both streams.
        extra_post_data = {
            "can_add_subscribers_group": orjson.dumps(
                {
                    "direct_members": [cordelia.id],
                    "direct_subgroups": [moderators_group.id],
                }
            ).decode(),
        }
        result = self.subscribe_via_post(
            hamlet,
            subscriptions,
            extra_post_data,
        )
        self.assert_json_success(result)
        stream_1 = get_stream("new_stream", realm)
        stream_2 = get_stream("new_stream_2", realm)
        # Check value of can_administer_channel_group setting which is set to its default
        # of an anonymous group with creator as the only member.
        self.assertFalse(hasattr(stream_1.can_administer_channel_group, "named_user_group"))
        self.assertFalse(hasattr(stream_2.can_administer_channel_group, "named_user_group"))
        self.assertEqual(list(stream_1.can_administer_channel_group.direct_members.all()), [hamlet])
        self.assertEqual(list(stream_2.can_administer_channel_group.direct_members.all()), [hamlet])
        self.assertEqual(list(stream_1.can_administer_channel_group.direct_subgroups.all()), [])
        self.assertEqual(list(stream_2.can_administer_channel_group.direct_subgroups.all()), [])
        # Check value of can_add_subscribers_group setting which is set to an anonymous
        # group as request.
        self.assertFalse(hasattr(stream_1.can_add_subscribers_group, "named_user_group"))
        self.assertFalse(hasattr(stream_2.can_add_subscribers_group, "named_user_group"))
        self.assertEqual(list(stream_1.can_add_subscribers_group.direct_members.all()), [cordelia])
        self.assertEqual(list(stream_2.can_add_subscribers_group.direct_members.all()), [cordelia])
        self.assertEqual(
            list(stream_1.can_add_subscribers_group.direct_subgroups.all()), [moderators_group]
        )
        self.assertEqual(
            list(stream_2.can_add_subscribers_group.direct_subgroups.all()), [moderators_group]
        )
        # Check that for each stream, different anonymous group is used.
        self.assertNotEqual(
            stream_1.can_administer_channel_group_id, stream_2.can_administer_channel_group_id
        )
        self.assertNotEqual(
            stream_1.can_add_subscribers_group_id, stream_2.can_add_subscribers_group_id
        )
| {
"repo_id": "zulip/zulip",
"file_path": "zerver/tests/test_channel_creation.py",
"license": "Apache License 2.0",
"lines": 1168,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
zulip/zulip:zerver/lib/demo_organizations.py | import os
from datetime import timedelta
from functools import lru_cache
import orjson
from django.conf import settings
from django.utils.timezone import now as timezone_now
from django.utils.translation import gettext as _
from zerver.actions.scheduled_messages import check_schedule_message
from zerver.lib.exceptions import JsonableError
from zerver.lib.timestamp import ceiling_to_day, datetime_to_global_time
from zerver.models.clients import get_client
from zerver.models.realms import Realm
from zerver.models.users import UserProfile, get_system_bot
@lru_cache(None)
def get_demo_organization_wordlists() -> dict[str, list[str]]:
    """Load (and memoize for the process lifetime) the demo-organization
    word lists shipped in zerver/lib/demo_organization_words.json."""
    wordlist_path = os.path.join(
        settings.DEPLOY_ROOT, "zerver/lib", "demo_organization_words.json"
    )
    with open(wordlist_path, "rb") as f:
        raw = f.read()
    return orjson.loads(raw)
def demo_organization_owner_email_exists(realm: Realm) -> bool:
    """Return True unless every human owner of the realm still has the
    empty placeholder delivery email."""
    owner_emails = realm.get_human_owner_users().values_list("delivery_email", flat=True)
    return set(owner_emails) != {""}
def check_demo_organization_has_set_email(realm: Realm) -> None:
    """Raise a JsonableError if the demo organization's owner has not yet
    configured a real email address.

    This should be called after checking that the realm has a
    demo_organization_scheduled_deletion_date set.
    """
    assert realm.demo_organization_scheduled_deletion_date is not None
    if demo_organization_owner_email_exists(realm):
        return
    raise JsonableError(_("Configure owner account email address."))
def get_demo_organization_deadline_days_remaining(realm: Realm) -> int:
    """Whole days left until the demo organization's scheduled deletion."""
    deletion_date = realm.demo_organization_scheduled_deletion_date
    assert deletion_date is not None
    return (deletion_date - timezone_now()).days
def get_scheduled_deletion_global_time(realm: Realm) -> str:
    """Return the effective deletion time as a global-time string: the
    scheduled deletion date rounded up to a day boundary plus 6 hours
    (matching when the deletion cron job is expected to run)."""
    assert realm.demo_organization_scheduled_deletion_date is not None
    cronjob_timestamp = ceiling_to_day(
        realm.demo_organization_scheduled_deletion_date
    ) + timedelta(hours=6)
    return datetime_to_global_time(cronjob_timestamp)
def schedule_demo_organization_deletion_reminder(user: UserProfile) -> None:
    """Schedule a NOTIFICATION_BOT DM reminding a realm owner that their
    demo organization will be deleted unless converted.

    The reminder is scheduled for one week before the realm's
    demo_organization_scheduled_deletion_date, which must be set.
    """
    assert user.is_realm_owner
    realm = user.realm
    assert realm.demo_organization_scheduled_deletion_date is not None
    # Deliver the reminder 7 days before the scheduled deletion.
    deliver_at = realm.demo_organization_scheduled_deletion_date - timedelta(days=7)
    sender = get_system_bot(settings.NOTIFICATION_BOT, realm.id)
    client = get_client("Internal")
    message_content = _("""
As a reminder, this [demo organization]({demo_help}) will be automatically deleted on {deletion_time}, unless it's [converted into a permanent organization]({convert_demo}).
""").format(
        demo_help="/help/demo-organizations",
        deletion_time=get_scheduled_deletion_global_time(realm),
        convert_demo="/help/demo-organizations#convert-a-demo-organization-to-a-permanent-organization",
    )
    # skip_events=True: this is scheduled server-side, so no client needs
    # a scheduled-message event for it.
    check_schedule_message(
        sender,
        client,
        "private",
        [user.id],
        None,
        message_content,
        deliver_at,
        realm,
        skip_events=True,
    )
| {
"repo_id": "zulip/zulip",
"file_path": "zerver/lib/demo_organizations.py",
"license": "Apache License 2.0",
"lines": 62,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
zulip/zulip:zerver/actions/channel_folders.py | from dataclasses import asdict
from django.db import transaction
from django.utils.timezone import now as timezone_now
from django.utils.translation import gettext as _
from zerver.lib.channel_folders import get_channel_folder_data, render_channel_folder_description
from zerver.lib.exceptions import JsonableError
from zerver.models import ChannelFolder, Realm, RealmAuditLog, UserProfile
from zerver.models.realm_audit_logs import AuditLogEventType
from zerver.models.users import active_user_ids
from zerver.tornado.django_api import send_event_on_commit
@transaction.atomic(durable=True)
def check_add_channel_folder(
    realm: Realm, name: str, description: str, *, acting_user: UserProfile
) -> ChannelFolder:
    """Create a channel folder in the realm, write an audit-log entry, and
    notify all active users via a channel_folder/add event.

    Note: name/description validation is expected to happen in callers;
    this function only performs the creation.
    """
    rendered_description = render_channel_folder_description(
        description, realm, acting_user=acting_user
    )
    channel_folder = ChannelFolder.objects.create(
        realm=realm,
        name=name,
        description=description,
        rendered_description=rendered_description,
        creator_id=acting_user.id,
    )
    # The default display order is the folder's own ID, which is only
    # known after the INSERT above; hence the second save.
    channel_folder.order = channel_folder.id
    channel_folder.save(update_fields=["order"])
    creation_time = timezone_now()
    RealmAuditLog.objects.create(
        realm=realm,
        acting_user=acting_user,
        event_type=AuditLogEventType.CHANNEL_FOLDER_CREATED,
        event_time=creation_time,
        modified_channel_folder=channel_folder,
    )
    event = dict(
        type="channel_folder",
        op="add",
        channel_folder=asdict(get_channel_folder_data(channel_folder)),
    )
    send_event_on_commit(realm, event, active_user_ids(realm.id))
    return channel_folder
@transaction.atomic(durable=True)
def try_reorder_realm_channel_folders(realm: Realm, order: list[int]) -> None:
    """Reassign the display order of all channel folders in the realm.

    ``order`` lists channel folder IDs in the desired display order and
    must include every folder in the realm; otherwise a JsonableError is
    raised with no changes applied (the surrounding transaction rolls
    back). Sends a channel_folder/reorder event to all active users on
    commit.
    """
    # Map each folder ID to its position in the requested ordering.
    # (Fixed: the old comprehension bound the loop variable to `_`, which
    # shadows this module's gettext alias and conventionally means
    # "unused value".)
    order_mapping = {folder_id: position for position, folder_id in enumerate(order)}
    channel_folders = ChannelFolder.objects.filter(realm=realm)
    # Validate the full mapping before mutating anything.
    for channel_folder in channel_folders:
        if channel_folder.id not in order_mapping:
            raise JsonableError(_("Invalid order mapping."))
    for channel_folder in channel_folders:
        channel_folder.order = order_mapping[channel_folder.id]
        channel_folder.save(update_fields=["order"])
    event = dict(
        type="channel_folder",
        op="reorder",
        order=order,
    )
    send_event_on_commit(realm, event, active_user_ids(realm.id))
def do_send_channel_folder_update_event(
    channel_folder: ChannelFolder, data: dict[str, str | bool]
) -> None:
    """Broadcast a channel_folder/update event carrying ``data`` (the
    changed fields) to every active user in the folder's realm."""
    realm = channel_folder.realm
    event = dict(
        type="channel_folder",
        op="update",
        channel_folder_id=channel_folder.id,
        data=data,
    )
    send_event_on_commit(realm, event, active_user_ids(realm.id))
@transaction.atomic(durable=True)
def do_change_channel_folder_name(
    channel_folder: ChannelFolder, name: str, *, acting_user: UserProfile
) -> None:
    """Rename a channel folder, record an audit-log entry, and notify
    clients via a channel_folder/update event."""
    previous_name = channel_folder.name
    channel_folder.name = name
    channel_folder.save(update_fields=["name"])
    audit_extra_data = {
        RealmAuditLog.OLD_VALUE: previous_name,
        RealmAuditLog.NEW_VALUE: name,
    }
    RealmAuditLog.objects.create(
        realm=acting_user.realm,
        acting_user=acting_user,
        event_type=AuditLogEventType.CHANNEL_FOLDER_NAME_CHANGED,
        event_time=timezone_now(),
        modified_channel_folder=channel_folder,
        extra_data=audit_extra_data,
    )
    do_send_channel_folder_update_event(channel_folder, dict(name=name))
@transaction.atomic(durable=True)
def do_change_channel_folder_description(
    channel_folder: ChannelFolder, description: str, *, acting_user: UserProfile
) -> None:
    """Update a channel folder's description (raw and rendered), record an
    audit-log entry, and notify clients via a channel_folder/update event.
    """
    old_value = channel_folder.description
    # Render before mutating, so a Markdown failure leaves the folder
    # untouched.
    rendered_description = render_channel_folder_description(
        description, acting_user.realm, acting_user=acting_user
    )
    channel_folder.description = description
    channel_folder.rendered_description = rendered_description
    channel_folder.save(update_fields=["description", "rendered_description"])
    RealmAuditLog.objects.create(
        realm=acting_user.realm,
        acting_user=acting_user,
        event_type=AuditLogEventType.CHANNEL_FOLDER_DESCRIPTION_CHANGED,
        event_time=timezone_now(),
        modified_channel_folder=channel_folder,
        extra_data={
            RealmAuditLog.OLD_VALUE: old_value,
            RealmAuditLog.NEW_VALUE: description,
        },
    )
    # Clients receive both the raw and rendered forms.
    do_send_channel_folder_update_event(
        channel_folder, dict(description=description, rendered_description=rendered_description)
    )
@transaction.atomic(durable=True)
def do_archive_channel_folder(channel_folder: ChannelFolder, *, acting_user: UserProfile) -> None:
    """Mark a channel folder as archived, with audit logging and a client
    notification event."""
    channel_folder.is_archived = True
    channel_folder.save(update_fields=["is_archived"])
    audit_log_fields = dict(
        realm=acting_user.realm,
        acting_user=acting_user,
        event_type=AuditLogEventType.CHANNEL_FOLDER_ARCHIVED,
        event_time=timezone_now(),
        modified_channel_folder=channel_folder,
    )
    RealmAuditLog.objects.create(**audit_log_fields)
    do_send_channel_folder_update_event(channel_folder, dict(is_archived=True))
@transaction.atomic(durable=True)
def do_unarchive_channel_folder(channel_folder: ChannelFolder, *, acting_user: UserProfile) -> None:
    """Restore a previously archived channel folder, with audit logging
    and a client notification event."""
    channel_folder.is_archived = False
    channel_folder.save(update_fields=["is_archived"])
    RealmAuditLog.objects.create(
        realm=acting_user.realm,
        acting_user=acting_user,
        event_time=timezone_now(),
        event_type=AuditLogEventType.CHANNEL_FOLDER_UNARCHIVED,
        modified_channel_folder=channel_folder,
    )
    do_send_channel_folder_update_event(channel_folder, dict(is_archived=False))
| {
"repo_id": "zulip/zulip",
"file_path": "zerver/actions/channel_folders.py",
"license": "Apache License 2.0",
"lines": 131,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
zulip/zulip:zerver/lib/channel_folders.py | from dataclasses import dataclass
from django.utils.translation import gettext as _
from zerver.lib.exceptions import JsonableError
from zerver.lib.markdown import markdown_convert
from zerver.lib.streams import get_web_public_streams_queryset
from zerver.lib.string_validation import check_string_is_printable
from zerver.lib.timestamp import datetime_to_timestamp
from zerver.models import ChannelFolder, Realm, Stream, UserProfile
@dataclass
class ChannelFolderData:
    """Client-facing representation of a ChannelFolder."""

    # Database ID of the folder.
    id: int
    name: str
    # Raw description text, as entered.
    description: str
    # HTML-rendered form of the description.
    rendered_description: str
    # Position in the realm's folder display ordering.
    order: int
    # User ID of the folder's creator; None when unset.
    creator_id: int | None
    # Creation time as a UNIX timestamp.
    date_created: int
    is_archived: bool
def check_channel_folder_name(name: str, realm: Realm) -> None:
    """Validate a prospective channel folder name, raising JsonableError
    when it is empty, contains an unprintable character, or collides
    case-insensitively with an unarchived folder in the realm."""
    if not name.strip():
        raise JsonableError(_("Channel folder name can't be empty."))
    bad_char_position = check_string_is_printable(name)
    if bad_char_position is not None:
        error_message = _(
            "Invalid character in channel folder name, at position {position}."
        ).format(position=bad_char_position)
        raise JsonableError(error_message)
    duplicate_exists = ChannelFolder.objects.filter(
        name__iexact=name, realm=realm, is_archived=False
    ).exists()
    if duplicate_exists:
        raise JsonableError(_("Channel folder name already in use"))
def render_channel_folder_description(text: str, realm: Realm, *, acting_user: UserProfile) -> str:
    """Render a folder description from Markdown to HTML, with previews
    disabled."""
    rendering_result = markdown_convert(
        text, message_realm=realm, no_previews=True, acting_user=acting_user
    )
    return rendering_result.rendered_content
def get_channel_folder_data(channel_folder: ChannelFolder) -> ChannelFolderData:
    """Convert a ChannelFolder model instance into its client-facing
    dataclass form."""
    return ChannelFolderData(
        id=channel_folder.id,
        name=channel_folder.name,
        description=channel_folder.description,
        rendered_description=channel_folder.rendered_description,
        order=channel_folder.order,
        date_created=datetime_to_timestamp(channel_folder.date_created),
        creator_id=channel_folder.creator_id,
        is_archived=channel_folder.is_archived,
    )
def get_channel_folders_in_realm(
    realm: Realm, include_archived: bool = False
) -> list[ChannelFolderData]:
    """Return the realm's channel folders (optionally including archived
    ones), sorted by their display order."""
    folder_query = ChannelFolder.objects.filter(realm=realm)
    if not include_archived:
        folder_query = folder_query.exclude(is_archived=True)
    folder_data = [get_channel_folder_data(folder) for folder in folder_query]
    folder_data.sort(key=lambda folder: folder.order)
    return folder_data
def get_channel_folder_by_id(channel_folder_id: int, realm: Realm) -> ChannelFolder:
    """Look up a channel folder by ID within the given realm, raising
    JsonableError for an unknown ID."""
    try:
        return ChannelFolder.objects.get(id=channel_folder_id, realm=realm)
    except ChannelFolder.DoesNotExist:
        raise JsonableError(_("Invalid channel folder ID"))
def get_channel_folders_for_spectators(realm: Realm) -> list[ChannelFolderData]:
    """Channel folders visible to logged-out spectators: those containing
    at least one web-public stream, sorted by folder ID."""
    web_public_folder_ids = set(
        get_web_public_streams_queryset(realm).values_list("folder_id", flat=True)
    )
    visible_folders = ChannelFolder.objects.filter(id__in=web_public_folder_ids)
    return sorted(
        (get_channel_folder_data(folder) for folder in visible_folders),
        key=lambda folder: folder.id,
    )
def check_channel_folder_in_use(channel_folder: ChannelFolder) -> bool:
    """Return whether any stream is currently assigned to this folder."""
    # Return the EXISTS query result directly instead of the redundant
    # `if ...: return True / return False` chain.
    return Stream.objects.filter(folder=channel_folder).exists()
| {
"repo_id": "zulip/zulip",
"file_path": "zerver/lib/channel_folders.py",
"license": "Apache License 2.0",
"lines": 71,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
zulip/zulip:zerver/tests/test_channel_folders.py | from typing import Any
import orjson
from typing_extensions import override
from zerver.actions.channel_folders import (
check_add_channel_folder,
do_archive_channel_folder,
try_reorder_realm_channel_folders,
)
from zerver.actions.streams import do_change_stream_folder, do_deactivate_stream
from zerver.lib.test_classes import ZulipTestCase
from zerver.models import ChannelFolder
from zerver.models.realms import get_realm
from zerver.models.streams import get_stream
class ChannelFoldersTestCase(ZulipTestCase):
    """Shared fixture: three folders in the zulip realm (Frontend/Backend
    by Iago, Marketing by Desdemona) and one in the lear realm (Devops),
    used by the folder test suites below."""

    @override
    def setUp(self) -> None:
        super().setUp()
        zulip_realm = get_realm("zulip")
        iago = self.example_user("iago")
        desdemona = self.example_user("desdemona")
        lear_user = self.lear_user("cordelia")
        check_add_channel_folder(
            zulip_realm,
            "Frontend",
            "Channels for frontend discussions",
            acting_user=iago,
        )
        check_add_channel_folder(
            zulip_realm, "Backend", "Channels for **backend** discussions", acting_user=iago
        )
        check_add_channel_folder(
            zulip_realm, "Marketing", "Channels for marketing discussions", acting_user=desdemona
        )
        # A folder in a different realm, to verify realm isolation.
        check_add_channel_folder(
            lear_user.realm, "Devops", "Channels for devops discussions", acting_user=lear_user
        )
class ChannelFolderCreationTest(ZulipTestCase):
    """Tests for the POST /json/channel_folders/create endpoint."""

    def test_creating_channel_folder(self) -> None:
        """Only organization administrators may create folders; a created
        folder defaults its order to its own ID."""
        self.login("shiva")
        realm = get_realm("zulip")
        params = {"name": "Frontend", "description": ""}
        result = self.client_post("/json/channel_folders/create", params)
        self.assert_json_error(result, "Must be an organization administrator")
        self.login("iago")
        result = self.client_post("/json/channel_folders/create", params)
        self.assert_json_success(result)
        channel_folder = ChannelFolder.objects.filter(realm=realm).last()
        assert channel_folder is not None
        self.assertEqual(channel_folder.name, "Frontend")
        self.assertEqual(channel_folder.description, "")
        # Default display order is the folder's own ID.
        self.assertEqual(channel_folder.id, channel_folder.order)
        response = orjson.loads(result.content)
        self.assertEqual(response["channel_folder_id"], channel_folder.id)
    def test_creating_channel_folder_with_duplicate_name(self) -> None:
        """Folder names must be unique (case-insensitively) among
        unarchived folders; archived folder names can be reused."""
        self.login("iago")
        realm = get_realm("zulip")
        params = {"name": "Frontend", "description": ""}
        result = self.client_post("/json/channel_folders/create", params)
        self.assert_json_success(result)
        self.assertTrue(ChannelFolder.objects.filter(realm=realm, name="Frontend").exists())
        result = self.client_post("/json/channel_folders/create", params)
        self.assert_json_error(result, "Channel folder name already in use")
        # Archived folder names can be reused.
        folder = ChannelFolder.objects.get(name="Frontend", realm=realm)
        do_archive_channel_folder(folder, acting_user=self.example_user("iago"))
        result = self.client_post("/json/channel_folders/create", params)
        self.assert_json_success(result)
        self.assertTrue(ChannelFolder.objects.filter(realm=realm, name="Frontend").exists())
        # Folder names should be unique case-insensitively.
        params["name"] = "frontEND"
        result = self.client_post("/json/channel_folders/create", params)
        self.assert_json_error(result, "Channel folder name already in use")
    def test_rendered_description_for_channel_folder(self) -> None:
        """Folder descriptions are stored both raw and Markdown-rendered."""
        self.login("iago")
        realm = get_realm("zulip")
        params = {"name": "Frontend", "description": "Channels for frontend discussions"}
        result = self.client_post("/json/channel_folders/create", params)
        self.assert_json_success(result)
        channel_folder = ChannelFolder.objects.get(realm=realm, name="Frontend")
        self.assertEqual(channel_folder.description, "Channels for frontend discussions")
        self.assertEqual(
            channel_folder.rendered_description, "<p>Channels for frontend discussions</p>"
        )
        # Markdown syntax in the description is rendered to HTML.
        params = {"name": "Backend", "description": "Channels for **backend** discussions"}
        result = self.client_post("/json/channel_folders/create", params)
        self.assert_json_success(result)
        channel_folder = ChannelFolder.objects.get(realm=realm, name="Backend")
        self.assertEqual(channel_folder.description, "Channels for **backend** discussions")
        self.assertEqual(
            channel_folder.rendered_description,
            "<p>Channels for <strong>backend</strong> discussions</p>",
        )
    def test_invalid_params_for_channel_folder(self) -> None:
        """Name/description validation: non-empty, printable characters,
        and within the model's length limits."""
        self.login("iago")
        params = {"name": "", "description": "Channels for frontend discussions"}
        result = self.client_post("/json/channel_folders/create", params)
        self.assert_json_error(result, "Channel folder name can't be empty.")
        invalid_name = "abc\000"
        params = {"name": invalid_name, "description": "Channels for frontend discussions"}
        result = self.client_post("/json/channel_folders/create", params)
        self.assert_json_error(result, "Invalid character in channel folder name, at position 4.")
        long_name = "a" * (ChannelFolder.MAX_NAME_LENGTH + 1)
        params = {"name": long_name, "description": "Channels for frontend discussions"}
        result = self.client_post("/json/channel_folders/create", params)
        self.assert_json_error(
            result, f"name is too long (limit: {ChannelFolder.MAX_NAME_LENGTH} characters)"
        )
        long_description = "a" * (ChannelFolder.MAX_DESCRIPTION_LENGTH + 1)
        params = {"name": "Frontend", "description": long_description}
        result = self.client_post("/json/channel_folders/create", params)
        self.assert_json_error(
            result,
            f"description is too long (limit: {ChannelFolder.MAX_DESCRIPTION_LENGTH} characters)",
        )
class GetChannelFoldersTest(ChannelFoldersTestCase):
    """Tests for GET /json/channel_folders (visibility, archived
    filtering, and ordering)."""

    def test_get_channel_folders(self) -> None:
        """All user types (admin, member, guest) see the realm's folders;
        archived folders appear only with include_archived=True."""
        iago = self.example_user("iago")
        desdemona = self.example_user("desdemona")
        zulip_realm = iago.realm
        frontend_folder = ChannelFolder.objects.get(name="Frontend", realm=zulip_realm)
        backend_folder = ChannelFolder.objects.get(name="Backend", realm=zulip_realm)
        marketing_folder = ChannelFolder.objects.get(name="Marketing", realm=zulip_realm)
        def check_channel_folders_in_zulip_realm(
            channel_folders: list[dict[str, Any]], marketing_folder_included: bool = True
        ) -> None:
            # Shared assertion helper: validates the fixture folders'
            # payloads, optionally expecting the Marketing folder too.
            if marketing_folder_included:
                self.assert_length(channel_folders, 3)
            else:
                self.assert_length(channel_folders, 2)
            self.assertEqual(channel_folders[0]["id"], frontend_folder.id)
            self.assertEqual(channel_folders[0]["name"], "Frontend")
            self.assertEqual(channel_folders[0]["description"], "Channels for frontend discussions")
            self.assertEqual(
                channel_folders[0]["rendered_description"],
                "<p>Channels for frontend discussions</p>",
            )
            self.assertEqual(channel_folders[0]["is_archived"], frontend_folder.is_archived)
            self.assertEqual(channel_folders[0]["creator_id"], iago.id)
            self.assertEqual(channel_folders[1]["id"], backend_folder.id)
            self.assertEqual(channel_folders[1]["name"], "Backend")
            self.assertEqual(
                channel_folders[1]["description"], "Channels for **backend** discussions"
            )
            self.assertEqual(
                channel_folders[1]["rendered_description"],
                "<p>Channels for <strong>backend</strong> discussions</p>",
            )
            self.assertEqual(channel_folders[1]["is_archived"], backend_folder.is_archived)
            self.assertEqual(channel_folders[1]["creator_id"], iago.id)
            if marketing_folder_included:
                self.assertEqual(channel_folders[2]["id"], marketing_folder.id)
                self.assertEqual(channel_folders[2]["name"], "Marketing")
                self.assertEqual(
                    channel_folders[2]["description"], "Channels for marketing discussions"
                )
                self.assertEqual(
                    channel_folders[2]["rendered_description"],
                    "<p>Channels for marketing discussions</p>",
                )
                self.assertEqual(channel_folders[2]["is_archived"], marketing_folder.is_archived)
                self.assertEqual(channel_folders[2]["creator_id"], desdemona.id)
        self.login("iago")
        result = self.client_get("/json/channel_folders")
        channel_folders_data = orjson.loads(result.content)["channel_folders"]
        check_channel_folders_in_zulip_realm(channel_folders_data)
        # Check member user can also see all channel folders.
        self.login("hamlet")
        result = self.client_get("/json/channel_folders")
        channel_folders_data = orjson.loads(result.content)["channel_folders"]
        check_channel_folders_in_zulip_realm(channel_folders_data)
        # Check guest can also see all channel folders.
        self.login("polonius")
        result = self.client_get("/json/channel_folders")
        channel_folders_data = orjson.loads(result.content)["channel_folders"]
        check_channel_folders_in_zulip_realm(channel_folders_data)
        # Archived folders are excluded by default...
        marketing_folder.is_archived = True
        marketing_folder.save()
        result = self.client_get("/json/channel_folders")
        channel_folders_data = orjson.loads(result.content)["channel_folders"]
        check_channel_folders_in_zulip_realm(channel_folders_data, False)
        # ...but included when include_archived is passed.
        result = self.client_get(
            "/json/channel_folders", {"include_archived": orjson.dumps(True).decode()}
        )
        channel_folders_data = orjson.loads(result.content)["channel_folders"]
        check_channel_folders_in_zulip_realm(channel_folders_data)
    def test_get_channel_folders_according_to_order(self) -> None:
        """The endpoint returns folders sorted by their order field, which
        try_reorder_realm_channel_folders can rearrange."""
        iago = self.example_user("iago")
        realm = iago.realm
        self.login_user(iago)
        result = self.client_get("/json/channel_folders")
        channel_folders_data = orjson.loads(result.content)["channel_folders"]
        channel_folders_names = [item["name"] for item in channel_folders_data]
        self.assertEqual(channel_folders_names, ["Frontend", "Backend", "Marketing"])
        # Reverse the ordering and confirm the endpoint reflects it.
        try_reorder_realm_channel_folders(
            realm, list(reversed([item["id"] for item in channel_folders_data]))
        )
        result = self.client_get("/json/channel_folders")
        channel_folders_data = orjson.loads(result.content)["channel_folders"]
        channel_folders_names = [item["name"] for item in channel_folders_data]
        self.assertEqual(channel_folders_names, ["Marketing", "Backend", "Frontend"])
class UpdateChannelFoldersTest(ChannelFoldersTestCase):
    def test_updating_channel_folder_name(self) -> None:
        """PATCH /json/channel_folders/<id> name changes: admin-only, with
        the same validation rules as creation (non-empty, unique
        case-insensitively, printable, within length limit)."""
        realm = get_realm("zulip")
        channel_folder = ChannelFolder.objects.get(name="Frontend", realm=realm)
        channel_folder_id = channel_folder.id
        self.login("hamlet")
        params = {"name": "Web frontend"}
        result = self.client_patch(f"/json/channel_folders/{channel_folder_id}", params)
        self.assert_json_error(result, "Must be an organization administrator")
        self.login("iago")
        # Test invalid channel folder ID.
        result = self.client_patch("/json/channel_folders/999", params)
        self.assert_json_error(result, "Invalid channel folder ID")
        result = self.client_patch(f"/json/channel_folders/{channel_folder_id}", params)
        self.assert_json_success(result)
        channel_folder = ChannelFolder.objects.get(id=channel_folder_id)
        self.assertEqual(channel_folder.name, "Web frontend")
        params = {"name": ""}
        result = self.client_patch(f"/json/channel_folders/{channel_folder_id}", params)
        self.assert_json_error(result, "Channel folder name can't be empty.")
        params = {"name": "Backend"}
        result = self.client_patch(f"/json/channel_folders/{channel_folder_id}", params)
        self.assert_json_error(result, "Channel folder name already in use")
        # Folder names should be unique case-insensitively.
        params = {"name": "backEND"}
        result = self.client_patch(f"/json/channel_folders/{channel_folder_id}", params)
        self.assert_json_error(result, "Channel folder name already in use")
        invalid_name = "abc\000"
        params = {"name": invalid_name}
        result = self.client_patch(f"/json/channel_folders/{channel_folder_id}", params)
        self.assert_json_error(result, "Invalid character in channel folder name, at position 4.")
        long_name = "a" * (ChannelFolder.MAX_NAME_LENGTH + 1)
        params = {"name": long_name}
        result = self.client_patch(f"/json/channel_folders/{channel_folder_id}", params)
        self.assert_json_error(
            result, f"name is too long (limit: {ChannelFolder.MAX_NAME_LENGTH} characters)"
        )
def test_updating_channel_folder_description(self) -> None:
channel_folder = ChannelFolder.objects.get(name="Frontend", realm=get_realm("zulip"))
channel_folder_id = channel_folder.id
self.login("hamlet")
params = {"description": "Channels for **frontend** discussions"}
result = self.client_patch(f"/json/channel_folders/{channel_folder_id}", params)
self.assert_json_error(result, "Must be an organization administrator")
self.login("iago")
# Test invalid channel folder ID.
result = self.client_patch("/json/channel_folders/999", params)
self.assert_json_error(result, "Invalid channel folder ID")
result = self.client_patch(f"/json/channel_folders/{channel_folder_id}", params)
self.assert_json_success(result)
channel_folder = ChannelFolder.objects.get(id=channel_folder_id)
self.assertEqual(channel_folder.description, "Channels for **frontend** discussions")
self.assertEqual(
channel_folder.rendered_description,
"<p>Channels for <strong>frontend</strong> discussions</p>",
)
# Channel folder descriptions can be empty.
params = {"description": ""}
result = self.client_patch(f"/json/channel_folders/{channel_folder_id}", params)
self.assert_json_success(result)
channel_folder = ChannelFolder.objects.get(id=channel_folder_id)
self.assertEqual(channel_folder.description, "")
self.assertEqual(channel_folder.rendered_description, "")
params = {"description": "a" * (ChannelFolder.MAX_DESCRIPTION_LENGTH + 1)}
result = self.client_patch(f"/json/channel_folders/{channel_folder_id}", params)
self.assert_json_error(
result,
f"description is too long (limit: {ChannelFolder.MAX_DESCRIPTION_LENGTH} characters)",
)
def test_archiving_and_unarchiving_channel_folder(self) -> None:
desdemona = self.example_user("desdemona")
realm = get_realm("zulip")
channel_folder = ChannelFolder.objects.get(name="Frontend", realm=realm)
channel_folder_id = channel_folder.id
self.login("hamlet")
params = {"is_archived": orjson.dumps(True).decode()}
result = self.client_patch(f"/json/channel_folders/{channel_folder_id}", params)
self.assert_json_error(result, "Must be an organization administrator")
self.login("iago")
# Test invalid channel folder ID.
result = self.client_patch("/json/channel_folders/999", params)
self.assert_json_error(result, "Invalid channel folder ID")
result = self.client_patch(f"/json/channel_folders/{channel_folder_id}", params)
self.assert_json_success(result)
channel_folder = ChannelFolder.objects.get(id=channel_folder_id)
self.assertTrue(channel_folder.is_archived)
params = {"is_archived": orjson.dumps(False).decode()}
result = self.client_patch(f"/json/channel_folders/{channel_folder_id}", params)
self.assert_json_success(result)
channel_folder = ChannelFolder.objects.get(id=channel_folder_id)
self.assertFalse(channel_folder.is_archived)
# Folder containing channels cannot be archived.
stream = get_stream("Verona", realm)
do_change_stream_folder(stream, channel_folder, acting_user=desdemona)
params = {"is_archived": orjson.dumps(True).decode()}
result = self.client_patch(f"/json/channel_folders/{channel_folder_id}", params)
self.assert_json_error(
result, "You need to remove all the channels from this folder to archive it."
)
do_deactivate_stream(stream, acting_user=desdemona)
result = self.client_patch(f"/json/channel_folders/{channel_folder_id}", params)
self.assert_json_error(
result, "You need to remove all the channels from this folder to archive it."
)
do_change_stream_folder(stream, None, acting_user=desdemona)
result = self.client_patch(f"/json/channel_folders/{channel_folder_id}", params)
self.assert_json_success(result)
channel_folder = ChannelFolder.objects.get(id=channel_folder_id)
self.assertTrue(channel_folder.is_archived)
class ReorderChannelFolderTest(ChannelFoldersTestCase):
    """Tests for PATCH /json/channel_folders (bulk reordering of folders)."""

    def test_reorder(self) -> None:
        self.login("iago")
        realm = get_realm("zulip")
        # NOTE(review): the payload is built from the `order` column, but the
        # endpoint and the assertion below treat its entries as folder IDs.
        # This relies on folders' initial `order` values matching their IDs
        # in the test database — confirm against the fixture setup.
        order = list(
            ChannelFolder.objects.filter(realm=realm)
            .order_by("-order")
            .values_list("order", flat=True)
        )
        result = self.client_patch(
            "/json/channel_folders", info={"order": orjson.dumps(order).decode()}
        )
        self.assert_json_success(result)
        # After reordering, the folder at position i should be order[i].
        fields = ChannelFolder.objects.filter(realm=realm).order_by("order")
        for field in fields:
            self.assertEqual(field.id, order[field.order])

    def test_reorder_duplicates(self) -> None:
        self.login("iago")
        realm = get_realm("zulip")
        order = list(
            ChannelFolder.objects.filter(realm=realm)
            .order_by("-order")
            .values_list("order", flat=True)
        )
        # A repeated ID in the payload should be harmless.
        frontend_folder = ChannelFolder.objects.get(name="Frontend", realm=realm)
        order.append(frontend_folder.id)
        result = self.client_patch(
            "/json/channel_folders", info={"order": orjson.dumps(order).decode()}
        )
        self.assert_json_success(result)
        fields = ChannelFolder.objects.filter(realm=realm).order_by("order")
        for field in fields:
            self.assertEqual(field.id, order[field.order])

    def test_reorder_unauthorized(self) -> None:
        # Non-admins may not reorder folders.
        self.login("hamlet")
        realm = get_realm("zulip")
        order = list(
            ChannelFolder.objects.filter(realm=realm)
            .order_by("-order")
            .values_list("order", flat=True)
        )
        result = self.client_patch(
            "/json/channel_folders", info={"order": orjson.dumps(order).decode()}
        )
        self.assert_json_error(result, "Must be an organization administrator")

    def test_reorder_invalid(self) -> None:
        self.login("iago")
        # IDs that don't exist in the realm are rejected.
        order = [100, 200, 300]
        result = self.client_patch(
            "/json/channel_folders", info={"order": orjson.dumps(order).decode()}
        )
        self.assert_json_error(result, "Invalid order mapping.")
        # An incomplete list of folder IDs is also rejected.
        order = [1, 2]
        result = self.client_patch(
            "/json/channel_folders", info={"order": orjson.dumps(order).decode()}
        )
        self.assert_json_error(result, "Invalid order mapping.")
| {
"repo_id": "zulip/zulip",
"file_path": "zerver/tests/test_channel_folders.py",
"license": "Apache License 2.0",
"lines": 366,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
zulip/zulip:zerver/views/channel_folders.py | from dataclasses import asdict
from typing import Annotated
from django.http import HttpRequest, HttpResponse
from django.utils.translation import gettext as _
from pydantic import Json, StringConstraints
from zerver.actions.channel_folders import (
check_add_channel_folder,
do_archive_channel_folder,
do_change_channel_folder_description,
do_change_channel_folder_name,
do_unarchive_channel_folder,
try_reorder_realm_channel_folders,
)
from zerver.decorator import require_realm_admin
from zerver.lib.channel_folders import (
check_channel_folder_in_use,
check_channel_folder_name,
get_channel_folder_by_id,
get_channel_folders_in_realm,
)
from zerver.lib.exceptions import JsonableError
from zerver.lib.response import json_success
from zerver.lib.typed_endpoint import PathOnly, typed_endpoint
from zerver.models.channel_folders import ChannelFolder
from zerver.models.users import UserProfile
@require_realm_admin
@typed_endpoint
def create_channel_folder(
    request: HttpRequest,
    user_profile: UserProfile,
    *,
    description: Annotated[str, StringConstraints(max_length=ChannelFolder.MAX_DESCRIPTION_LENGTH)],
    name: Annotated[str, StringConstraints(max_length=ChannelFolder.MAX_NAME_LENGTH)],
) -> HttpResponse:
    """Create a new channel folder in the acting admin's realm.

    Returns the new folder's ID as "channel_folder_id".
    """
    # Validate characters/uniqueness before creating the folder.
    check_channel_folder_name(name, user_profile.realm)
    new_folder = check_add_channel_folder(
        user_profile.realm, name, description, acting_user=user_profile
    )
    return json_success(request, data={"channel_folder_id": new_folder.id})
@typed_endpoint
def get_channel_folders(
    request: HttpRequest,
    user_profile: UserProfile,
    *,
    include_archived: Json[bool] = False,
) -> HttpResponse:
    """Return the channel folders in the user's realm, optionally
    including archived ones."""
    folder_dicts = [
        asdict(channel_folder)
        for channel_folder in get_channel_folders_in_realm(user_profile.realm, include_archived)
    ]
    return json_success(request, data={"channel_folders": folder_dicts})
@require_realm_admin
@typed_endpoint
def reorder_realm_channel_folders(
    request: HttpRequest,
    user_profile: UserProfile,
    *,
    order: Json[list[int]],
) -> HttpResponse:
    """Reorder the realm's channel folders per the given list of folder IDs."""
    realm = user_profile.realm
    try_reorder_realm_channel_folders(realm, order)
    return json_success(request)
@require_realm_admin
@typed_endpoint
def update_channel_folder(
    request: HttpRequest,
    user_profile: UserProfile,
    *,
    channel_folder_id: PathOnly[int],
    description: Annotated[
        str | None, StringConstraints(max_length=ChannelFolder.MAX_DESCRIPTION_LENGTH)
    ] = None,
    is_archived: Json[bool] | None = None,
    name: Annotated[str | None, StringConstraints(max_length=ChannelFolder.MAX_NAME_LENGTH)] = None,
) -> HttpResponse:
    """Update a channel folder's name, description and/or archived state.

    Each property is only touched when the request supplies a value that
    differs from the current one; updates (and their events) happen in
    name, description, archived-state order.
    """
    realm = user_profile.realm
    folder = get_channel_folder_by_id(channel_folder_id, realm)

    if name is not None and name != folder.name:
        check_channel_folder_name(name, realm)
        do_change_channel_folder_name(folder, name, acting_user=user_profile)

    if description is not None and description != folder.description:
        do_change_channel_folder_description(folder, description, acting_user=user_profile)

    if is_archived is not None and is_archived != folder.is_archived:
        if not is_archived:
            do_unarchive_channel_folder(folder, acting_user=user_profile)
        else:
            # A folder that still contains channels cannot be archived.
            if check_channel_folder_in_use(folder):
                raise JsonableError(
                    _("You need to remove all the channels from this folder to archive it.")
                )
            do_archive_channel_folder(folder, acting_user=user_profile)

    return json_success(request)
| {
"repo_id": "zulip/zulip",
"file_path": "zerver/views/channel_folders.py",
"license": "Apache License 2.0",
"lines": 89,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
zulip/zulip:zerver/models/channel_folders.py | from django.db import models
from django.db.models import Q
from django.db.models.functions import Lower
from django.utils.timezone import now as timezone_now
from zerver.models.realms import Realm
from zerver.models.users import UserProfile
class ChannelFolder(models.Model):
    """A named, orderable grouping of channels within a realm."""

    MAX_NAME_LENGTH = 60
    MAX_DESCRIPTION_LENGTH = 1024

    realm = models.ForeignKey(Realm, on_delete=models.CASCADE)
    name = models.CharField(max_length=MAX_NAME_LENGTH)
    description = models.CharField(max_length=MAX_DESCRIPTION_LENGTH, default="")
    # HTML-rendered version of `description`.
    rendered_description = models.TextField(default="")
    # Position of this folder when displaying the realm's folders.
    order = models.IntegerField(default=0)
    date_created = models.DateTimeField(default=timezone_now)
    # SET_NULL so folders survive the deletion of their creator.
    creator = models.ForeignKey(UserProfile, null=True, on_delete=models.SET_NULL)
    is_archived = models.BooleanField(default=False)

    class Meta:
        constraints = [
            # Names are unique per realm, case-insensitively, among
            # non-archived folders; archiving a folder frees up its name.
            models.UniqueConstraint(
                Lower("name"),
                "realm",
                condition=Q(is_archived=False),
                name="unique_realm_folder_name_when_not_archived",
            ),
        ]
| {
"repo_id": "zulip/zulip",
"file_path": "zerver/models/channel_folders.py",
"license": "Apache License 2.0",
"lines": 26,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
zulip/zulip:zerver/lib/topic_link_util.py | # See the Zulip URL spec at https://zulip.com/api/zulip-urls
#
# Keep this synchronized with web/src/topic_link_util.ts
import re
from zerver.lib.url_encoding import encode_channel, encode_hash_component
from zerver.models.messages import Message
# Characters (and the two-character sequence "$$") that break Zulip's
# #**channel>topic** link syntax when they appear in a channel/topic name.
invalid_stream_topic_regex = re.compile(r"[`>*&\[\]]|(\$\$)")


def will_produce_broken_stream_topic_link(word: str) -> bool:
    """Return True if `word` contains characters that would corrupt the
    #**channel>topic** link syntax, requiring the markdown fallback."""
    return bool(invalid_stream_topic_regex.search(word))


# HTML-entity escapes for each problematic character/sequence.  The previous
# version of this table mapped every key to itself (the entity references had
# been decoded to literal characters), which made escaping a no-op and
# produced broken markdown fallback links — e.g. an unescaped "]" in a topic
# name prematurely terminates the link text.
# Keep this synchronized with web/src/topic_link_util.ts.
escape_mapping = {
    "`": "&#96;",
    ">": "&gt;",
    "*": "&#42;",
    "&": "&amp;",
    "$$": "&#36;&#36;",
    "[": "&#91;",
    "]": "&#93;",
}


def escape_invalid_stream_topic_characters(text: str) -> str:
    """Replace link-breaking characters in `text` with HTML-entity escapes."""
    return re.sub(
        invalid_stream_topic_regex,
        lambda match: escape_mapping.get(match.group(0), match.group(0)),
        text,
    )
def get_fallback_markdown_link(
    stream_id: int, stream_name: str, topic_name: str | None = None, message_id: int | None = None
) -> str:
    """
    Helper that should only be called by other methods in this file.

    Builds a vanilla markdown link for a channel/topic/message, used as a
    fallback when the nicer Zulip #**...** syntax would render incorrectly
    because the channel or topic name contains special characters.
    """
    hash_url = f"#narrow/channel/{encode_channel(stream_id, stream_name)}"
    label = f"#{escape_invalid_stream_topic_characters(stream_name)}"
    if topic_name is not None:
        hash_url += f"/topic/{encode_hash_component(topic_name)}"
        # An empty topic is displayed with its "general chat" fallback name.
        display_topic = topic_name if topic_name != "" else Message.EMPTY_TOPIC_FALLBACK_NAME
        label += f" > {escape_invalid_stream_topic_characters(display_topic)}"
    if message_id is not None:
        hash_url += f"/near/{message_id}"
        label += " @ 💬"
    return f"[{label}]({hash_url})"
def get_message_link_syntax(
    stream_id: int, stream_name: str, topic_name: str, message_id: int
) -> str:
    """Return link syntax for a specific message, preferring
    #**channel>topic@id** and falling back to a plain markdown link when
    either name would break that syntax."""
    names_are_safe = not any(
        will_produce_broken_stream_topic_link(part) for part in (topic_name, stream_name)
    )
    if names_are_safe:
        return f"#**{stream_name}>{topic_name}@{message_id}**"
    return get_fallback_markdown_link(stream_id, stream_name, topic_name, message_id)
def get_stream_topic_link_syntax(stream_id: int, stream_name: str, topic_name: str) -> str:
    """Return link syntax for a topic, preferring #**channel>topic** and
    falling back to a plain markdown link when either name would break it."""
    names_are_safe = not any(
        will_produce_broken_stream_topic_link(part) for part in (topic_name, stream_name)
    )
    if names_are_safe:
        return f"#**{stream_name}>{topic_name}**"
    return get_fallback_markdown_link(stream_id, stream_name, topic_name)
def get_stream_link_syntax(stream_id: int, stream_name: str) -> str:
    """Return link syntax for a channel, preferring #**channel** and falling
    back to a plain markdown link when the name would break that syntax."""
    if not will_produce_broken_stream_topic_link(stream_name):
        return f"#**{stream_name}**"
    return get_fallback_markdown_link(stream_id, stream_name)
| {
"repo_id": "zulip/zulip",
"file_path": "zerver/lib/topic_link_util.py",
"license": "Apache License 2.0",
"lines": 70,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
zulip/zulip:zerver/tests/test_topic_link_util.py | from zerver.lib.test_classes import ZulipTestCase
from zerver.lib.topic_link_util import (
get_message_link_syntax,
get_stream_link_syntax,
get_stream_topic_link_syntax,
)
class TestTopicLinkUtil(ZulipTestCase):
    """Tests for the channel/topic/message link-syntax helpers.

    NOTE(review): the expected strings below contain the special characters
    verbatim (e.g. "$$", "*", "]") in the link *text*, which yields broken
    markdown (an unescaped "]" terminates link text).  These expectations
    look like HTML entity references ("&#36;", "&#42;", ...) that were
    decoded during extraction — confirm against the escaping logic in
    zerver/lib/topic_link_util.py and regenerate if so.
    """

    def test_stream_link_syntax(self) -> None:
        sweden_id = self.make_stream("Sweden").id
        money_id = self.make_stream("$$MONEY$$").id
        md_id = self.make_stream("Markdown [md]").id

        # A clean name uses the nice #**channel** syntax.
        self.assertEqual(get_stream_link_syntax(sweden_id, "Sweden"), "#**Sweden**")
        # Names with special characters fall back to plain markdown links.
        self.assertEqual(
            get_stream_link_syntax(money_id, "$$MONEY$$"),
            f"[#$$MONEY$$](#narrow/channel/{money_id}-.24.24MONEY.24.24)",
        )
        self.assertEqual(
            get_stream_link_syntax(md_id, "Markdown [md]"),
            f"[#Markdown [md]](#narrow/channel/{md_id}-Markdown-.5Bmd.5D)",
        )

    def test_stream_topic_link_syntax(self) -> None:
        sweden_id = self.make_stream("Sweden").id
        money_id = self.make_stream("$$MONEY$$").id
        denmark_id = self.get_stream_id("Denmark")

        # Clean channel and topic names use the #**channel>topic** syntax.
        self.assertEqual(
            get_stream_topic_link_syntax(sweden_id, "Sweden", "topic"), "#**Sweden>topic**"
        )
        # Each special character in the topic forces the markdown fallback.
        self.assertEqual(
            get_stream_topic_link_syntax(sweden_id, "Sweden", "test `test` test"),
            f"[#Sweden > test `test` test](#narrow/channel/{sweden_id}-Sweden/topic/test.20.60test.60.20test)",
        )
        self.assertEqual(
            get_stream_topic_link_syntax(denmark_id, "Denmark", "test `test` test`s"),
            f"[#Denmark > test `test` test`s](#narrow/channel/{denmark_id}-Denmark/topic/test.20.60test.60.20test.60s)",
        )
        self.assertEqual(
            get_stream_topic_link_syntax(sweden_id, "Sweden", "error due to *"),
            f"[#Sweden > error due to *](#narrow/channel/{sweden_id}-Sweden/topic/error.20due.20to.20.2A)",
        )
        self.assertEqual(
            get_stream_topic_link_syntax(sweden_id, "Sweden", "*asterisk"),
            f"[#Sweden > *asterisk](#narrow/channel/{sweden_id}-Sweden/topic/.2Aasterisk)",
        )
        self.assertEqual(
            get_stream_topic_link_syntax(sweden_id, "Sweden", "greaterthan>"),
            f"[#Sweden > greaterthan>](#narrow/channel/{sweden_id}-Sweden/topic/greaterthan.3E)",
        )
        self.assertEqual(
            get_stream_topic_link_syntax(money_id, "$$MONEY$$", "dollar"),
            f"[#$$MONEY$$ > dollar](#narrow/channel/{money_id}-.24.24MONEY.24.24/topic/dollar)",
        )
        self.assertEqual(
            get_stream_topic_link_syntax(sweden_id, "Sweden", "swe$$dish"),
            f"[#Sweden > swe$$dish](#narrow/channel/{sweden_id}-Sweden/topic/swe.24.24dish)",
        )
        self.assertEqual(
            get_stream_topic_link_syntax(sweden_id, "Sweden", "&ab"),
            f"[#Sweden > &ab](#narrow/channel/{sweden_id}-Sweden/topic/.26ab)",
        )
        self.assertEqual(
            get_stream_topic_link_syntax(sweden_id, "Sweden", "&ab]"),
            f"[#Sweden > &ab]](#narrow/channel/{sweden_id}-Sweden/topic/.26ab.5D)",
        )
        self.assertEqual(
            get_stream_topic_link_syntax(sweden_id, "Sweden", "&a[b"),
            f"[#Sweden > &a[b](#narrow/channel/{sweden_id}-Sweden/topic/.26a.5Bb)",
        )
        # An empty topic keeps the nice syntax when the channel name is clean...
        self.assertEqual(
            get_stream_topic_link_syntax(sweden_id, "Sweden", ""),
            "#**Sweden>**",
        )
        # ...but falls back (with the "general chat" label) when it isn't.
        self.assertEqual(
            get_stream_topic_link_syntax(sweden_id, "Sw*den", ""),
            f"[#Sw*den > general chat](#narrow/channel/{sweden_id}-Sw.2Aden/topic/)",
        )

    def test_message_link_syntax(self) -> None:
        sweden_id = self.make_stream("Sweden").id

        # Clean names use the #**channel>topic@id** syntax.
        self.assertEqual(
            get_message_link_syntax(sweden_id, "Sweden", "topic", 123),
            "#**Sweden>topic@123**",
        )
        self.assertEqual(
            get_message_link_syntax(sweden_id, "Sweden", "", 123),
            "#**Sweden>@123**",
        )
        # Special characters force the markdown fallback with "@ 💬".
        self.assertEqual(
            get_message_link_syntax(sweden_id, "Sw*den", "topic", 123),
            f"[#Sw*den > topic @ 💬](#narrow/channel/{sweden_id}-Sw.2Aden/topic/topic/near/123)",
        )
        self.assertEqual(
            get_message_link_syntax(sweden_id, "Sw*den", "", 123),
            f"[#Sw*den > general chat @ 💬](#narrow/channel/{sweden_id}-Sw.2Aden/topic//near/123)",
        )
| {
"repo_id": "zulip/zulip",
"file_path": "zerver/tests/test_topic_link_util.py",
"license": "Apache License 2.0",
"lines": 93,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
zulip/zulip:zerver/actions/reminders.py | import datetime
from django.db import transaction
from zerver.actions.message_send import check_message
from zerver.actions.scheduled_messages import do_schedule_messages
from zerver.lib.addressee import Addressee
from zerver.lib.message import access_message
from zerver.lib.reminders import get_reminder_formatted_content, notify_remove_reminder
from zerver.models import Client, ScheduledMessage, UserProfile
def schedule_reminder_for_message(
    current_user: UserProfile,
    client: Client,
    message_id: int,
    deliver_at: datetime.datetime,
    note: str,
) -> int:
    """Schedule a self-DM reminder about `message_id` for delivery at
    `deliver_at`, returning the new ScheduledMessage's ID."""
    target_message = access_message(current_user, message_id, is_modifying_message=False)

    # Even though reminder will be sent from NOTIFICATION_BOT, we still
    # set current_user as the sender here to help us make the permission checks easier.
    self_addressee = Addressee.for_user_profile(current_user)
    reminder_content = get_reminder_formatted_content(target_message, current_user, note)

    # This can raise an exception in the unlikely event that the current user cannot DM themself.
    send_request = check_message(
        current_user,
        client,
        self_addressee,
        reminder_content,
        current_user.realm,
    )
    send_request.deliver_at = deliver_at
    send_request.reminder_target_message_id = message_id
    send_request.reminder_note = note

    scheduled_message_ids = do_schedule_messages(
        [send_request],
        current_user,
        read_by_sender=False,
        delivery_type=ScheduledMessage.REMIND,
    )
    return scheduled_message_ids[0]
@transaction.atomic(durable=True)
def do_delete_reminder(user_profile: UserProfile, reminder: ScheduledMessage) -> None:
    """Delete a reminder and notify the owner's clients, atomically."""
    assert reminder.delivery_type == ScheduledMessage.REMIND
    # Capture the ID before deletion; the removal event needs it afterwards.
    deleted_reminder_id = reminder.id
    reminder.delete()
    notify_remove_reminder(user_profile, deleted_reminder_id)
| {
"repo_id": "zulip/zulip",
"file_path": "zerver/actions/reminders.py",
"license": "Apache License 2.0",
"lines": 42,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
zulip/zulip:zerver/lib/reminders.py | from enum import Enum
from django.conf import settings
from django.utils.translation import gettext as _
from zerver.lib.display_recipient import get_display_recipient
from zerver.lib.exceptions import JsonableError, ResourceNotFoundError
from zerver.lib.markdown.fenced_code import get_unused_fence
from zerver.lib.mention import silent_mention_syntax_for_user
from zerver.lib.message import get_user_mentions_for_display, truncate_content
from zerver.lib.message_cache import MessageDict
from zerver.lib.topic_link_util import get_stream_topic_link_syntax
from zerver.lib.types import UserDisplayRecipient
from zerver.lib.url_encoding import message_link_url
from zerver.models import Message, Stream, UserProfile
from zerver.models.scheduled_jobs import ScheduledMessage
from zerver.tornado.django_api import send_event_on_commit
def normalize_note_text(body: str) -> str:
    """Trim whitespace from a reminder note and enforce its maximum length.

    Raises JsonableError if the trimmed note is too long.
    """
    # Similar to zerver.lib.message.normalize_body
    normalized = body.rstrip().lstrip("\n")
    max_length = settings.MAX_REMINDER_NOTE_LENGTH
    if len(normalized) > max_length:
        raise JsonableError(
            _("Maximum reminder note length: {max_length} characters").format(
                max_length=max_length
            )
        )
    return normalized
class ReminderRecipientType(Enum):
    """Kind of conversation the original message belonged to; selects the
    phrasing template used by get_reminder_formatted_content."""

    CHANNEL = "channel"
    PRIVATE = "private"
    NOTE_TO_SELF = "note to self"
def get_reminder_formatted_content(
    message: Message, current_user: UserProfile, note: str | None = None
) -> str:
    """Build the content of the reminder DM that will be delivered later.

    The reminder consists of an intro sentence (including the user's
    optional note), a line describing who sent the original message and
    where, and the original content quoted in a fenced block, truncated
    so the whole reminder fits within MAX_MESSAGE_LENGTH.
    """
    if note:
        note = normalize_note_text(note)
    format_recipient_type_key: ReminderRecipientType
    user_silent_mention = silent_mention_syntax_for_user(message.sender)
    conversation_url = message_link_url(current_user.realm, MessageDict.wide_dict(message))
    if message.is_channel_message:
        # We don't need to check access here since we already have the message
        # whose access has already been checked by the caller.
        stream = Stream.objects.get(
            id=message.recipient.type_id,
            realm=current_user.realm,
        )
        topic_pretty_link = get_stream_topic_link_syntax(
            stream_id=stream.id,
            stream_name=stream.name,
            topic_name=message.topic_name(),
        )
        if note:
            content = _(
                "You requested a reminder for the following message. Note:\n > {note}"
            ).format(
                note=note,
            )
        else:
            content = _("You requested a reminder for the following message.")
        format_recipient_type_key = ReminderRecipientType.CHANNEL
        context = dict(
            user_silent_mention=user_silent_mention,
            conversation_url=conversation_url,
            topic_pretty_link=topic_pretty_link,
        )
    else:
        if note:
            content = _(
                "You requested a reminder for the following direct message. Note:\n > {note}"
            ).format(
                note=note,
            )
        else:
            content = _("You requested a reminder for the following direct message.")
        # Everyone on the DM thread except the sender; an empty list means
        # the message was a note-to-self.
        #
        # Bug fix: this comparison previously used `is not`, which compares
        # integer identity rather than equality and only works for small
        # CPython-cached ints; user IDs above 256 would incorrectly fail it.
        recipients: list[UserProfile | UserDisplayRecipient] = [
            user
            for user in get_display_recipient(message.recipient)
            if user["id"] != message.sender.id
        ]
        if not recipients:
            format_recipient_type_key = ReminderRecipientType.NOTE_TO_SELF
            context = dict(
                conversation_url=conversation_url,
            )
        else:
            format_recipient_type_key = ReminderRecipientType.PRIVATE
            list_of_recipient_mentions = get_user_mentions_for_display(recipients)
            context = dict(
                user_silent_mention=user_silent_mention,
                conversation_url=conversation_url,
                list_of_recipient_mentions=list_of_recipient_mentions,
            )
    # Format the message content as a quote.
    content += "\n\n"
    # Phrasing templates keyed by conversation kind; "widget" variants are
    # used for /poll and /todo messages, whose raw content isn't quotable.
    REMINDER_FORMAT = {
        ReminderRecipientType.CHANNEL: {
            "widget": _(
                "{user_silent_mention} [sent]({conversation_url}) a {widget} in {topic_pretty_link}."
            ),
            "text": _("{user_silent_mention} [said]({conversation_url}) in {topic_pretty_link}:"),
        },
        ReminderRecipientType.PRIVATE: {
            "widget": _(
                "{user_silent_mention} [sent]({conversation_url}) a {widget} to {list_of_recipient_mentions}."
            ),
            "text": _(
                "{user_silent_mention} [said]({conversation_url}) to {list_of_recipient_mentions}:"
            ),
        },
        ReminderRecipientType.NOTE_TO_SELF: {
            "widget": _("You [sent]({conversation_url}) yourself a {widget}."),
            "text": _("You [sent]({conversation_url}) a note to yourself:"),
        },
    }
    if message.content.startswith("/poll"):
        context.update(widget="poll")
        content += REMINDER_FORMAT[format_recipient_type_key]["widget"].format_map(context)
    elif message.content.startswith("/todo"):
        context.update(widget="todo list")
        content += REMINDER_FORMAT[format_recipient_type_key]["widget"].format_map(context)
    else:
        content += REMINDER_FORMAT[format_recipient_type_key]["text"].format_map(context)
        content += "\n"
        # Quote the original message, truncating it so the final reminder
        # stays within the maximum message length.
        fence = get_unused_fence(content)
        quoted_message = "{fence}quote\n{msg_content}\n{fence}"
        length_without_message_content = len(
            content + quoted_message.format(fence=fence, msg_content="")
        )
        max_length = settings.MAX_MESSAGE_LENGTH - length_without_message_content
        msg_content = truncate_content(message.content, max_length, "\n[message truncated]")
        content += quoted_message.format(
            fence=fence,
            msg_content=msg_content,
        )
    return content
def access_reminder(user_profile: UserProfile, reminder_id: int) -> ScheduledMessage:
    """Return the reminder with the given ID owned by `user_profile`.

    Raises ResourceNotFoundError when no such reminder exists (including
    when the ID belongs to another user's reminder).
    """
    user_reminders = ScheduledMessage.objects.filter(
        delivery_type=ScheduledMessage.REMIND, sender=user_profile
    )
    try:
        return user_reminders.get(id=reminder_id)
    except ScheduledMessage.DoesNotExist:
        raise ResourceNotFoundError(_("Reminder does not exist"))
def notify_remove_reminder(user_profile: UserProfile, reminder_id: int) -> None:
    """Send a reminders/remove event to the owner's clients on commit."""
    send_event_on_commit(
        user_profile.realm,
        dict(type="reminders", op="remove", reminder_id=reminder_id),
        [user_profile.id],
    )
| {
"repo_id": "zulip/zulip",
"file_path": "zerver/lib/reminders.py",
"license": "Apache License 2.0",
"lines": 147,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
zulip/zulip:zerver/tests/test_reminders.py | import datetime
import time
from collections.abc import Sequence
from typing import TYPE_CHECKING
import time_machine
from django.test.utils import override_settings
from zerver.actions.scheduled_messages import try_deliver_one_scheduled_message
from zerver.lib.message import get_user_mentions_for_display
from zerver.lib.test_classes import ZulipTestCase
from zerver.lib.timestamp import timestamp_to_datetime
from zerver.models import Message, ScheduledMessage
from zerver.models.recipients import Recipient, get_or_create_direct_message_group
from zerver.models.users import UserProfile
if TYPE_CHECKING:
from django.test.client import _MonkeyPatchedWSGIResponse as TestHttpResponse
class RemindersTest(ZulipTestCase):
    def do_schedule_reminder(
        self,
        message_id: int,
        scheduled_delivery_timestamp: int,
        note: str | None = None,
    ) -> "TestHttpResponse":
        """POST /json/reminders as Hamlet for the given message and time."""
        self.login("hamlet")
        payload: dict[str, int | str] = {
            "message_id": message_id,
            "scheduled_delivery_timestamp": scheduled_delivery_timestamp,
        }
        if note is not None:
            payload["note"] = note
        result = self.client_post("/json/reminders", payload)
        return result
    def create_reminder(self, content: str, message_type: str = "direct") -> ScheduledMessage:
        """Send a message of the given type and schedule a reminder for it,
        returning the resulting ScheduledMessage row."""
        if message_type == "stream":
            message_id = self.send_channel_message_for_hamlet(content)
        else:
            message_id = self.send_dm_from_hamlet_to_othello(content)
        # Deliver one day in the future.
        scheduled_delivery_timestamp = int(time.time() + 86400)
        result = self.do_schedule_reminder(message_id, scheduled_delivery_timestamp)
        self.assert_json_success(result)
        return self.last_scheduled_reminder()
    def last_scheduled_reminder(self) -> ScheduledMessage:
        """Return the most recently created reminder row."""
        return ScheduledMessage.objects.filter(delivery_type=ScheduledMessage.REMIND).order_by(
            "-id"
        )[0]
    def send_channel_message_for_hamlet(self, content: str) -> int:
        """Send a message from Hamlet to #Verona and return its ID."""
        return self.send_stream_message(self.example_user("hamlet"), "Verona", content)
    def send_dm_from_hamlet_to_othello(self, content: str) -> int:
        """Send a DM from Hamlet to Othello and return its ID."""
        return self.send_personal_message(
            self.example_user("hamlet"), self.example_user("othello"), content
        )
    def get_dm_reminder_content(
        self, msg_content: str, msg_id: int, dm_recipients: Sequence[UserProfile]
    ) -> str:
        """Expected reminder content for a DM sent by Hamlet (user ID 10)."""
        recipient_mentions = get_user_mentions_for_display(list(dm_recipients))
        return (
            "You requested a reminder for the following direct message.\n\n"
            f"@_**King Hamlet|10** [said](http://zulip.testserver/#narrow/dm/10,12/near/{msg_id}) to {recipient_mentions}:\n```quote\n{msg_content}\n```"
        )
    def get_channel_message_reminder_content(self, msg_content: str, msg_id: int) -> str:
        """Expected reminder content for a #Verona channel message by Hamlet."""
        return (
            f"You requested a reminder for the following message.\n\n"
            f"@_**King Hamlet|10** [said](http://zulip.testserver/#narrow/channel/3-Verona/topic/test/near/{msg_id}) in #**Verona>test**:\n```quote\n{msg_content}\n```"
        )
    def test_schedule_reminder(self) -> None:
        """Happy path: schedule reminders for a channel message and a DM."""
        self.login("hamlet")
        content = "Test message"
        scheduled_delivery_timestamp = int(time.time() + 86400)

        # Scheduling a reminder to a channel you are subscribed is successful.
        message_id = self.send_channel_message_for_hamlet(content)
        result = self.do_schedule_reminder(message_id, scheduled_delivery_timestamp)
        self.assert_json_success(result)
        scheduled_message = self.last_scheduled_reminder()
        self.assertEqual(
            scheduled_message.content,
            self.get_channel_message_reminder_content(content, message_id),
        )
        # Recipient and sender are the same for reminders.
        self.assertEqual(scheduled_message.recipient.type_id, self.example_user("hamlet").id)
        self.assertEqual(scheduled_message.sender, self.example_user("hamlet"))
        self.assertEqual(
            scheduled_message.scheduled_timestamp,
            timestamp_to_datetime(scheduled_delivery_timestamp),
        )
        self.assertEqual(
            scheduled_message.reminder_target_message_id,
            message_id,
        )
        # Reminders are delivered as DMs, so they use the DM topic.
        self.assertEqual(scheduled_message.topic_name(), Message.DM_TOPIC)

        # Scheduling a direct message with user IDs is successful.
        othello = self.example_user("othello")
        message_id = self.send_dm_from_hamlet_to_othello(content)
        result = self.do_schedule_reminder(message_id, scheduled_delivery_timestamp)
        self.assert_json_success(result)
        scheduled_message = self.last_scheduled_reminder()
        self.assertEqual(
            scheduled_message.content,
            self.get_dm_reminder_content(content, message_id, [othello]),
        )
        self.assertEqual(scheduled_message.recipient.type_id, self.example_user("hamlet").id)
        self.assertEqual(scheduled_message.sender, self.example_user("hamlet"))
        self.assertEqual(
            scheduled_message.scheduled_timestamp,
            timestamp_to_datetime(scheduled_delivery_timestamp),
        )
        self.assertEqual(
            scheduled_message.reminder_target_message_id,
            message_id,
        )
        self.assertEqual(scheduled_message.topic_name(), Message.DM_TOPIC)
    @override_settings(PREFER_DIRECT_MESSAGE_GROUP=True)
    def test_schedule_reminder_using_direct_message_group(self) -> None:
        """With PREFER_DIRECT_MESSAGE_GROUP, the reminder's recipient is
        Hamlet's self direct-message group rather than a personal recipient."""
        hamlet = self.example_user("hamlet")
        othello = self.example_user("othello")
        self.login("hamlet")
        content = "Test message"
        scheduled_delivery_timestamp = int(time.time() + 86400)

        # Create a direct message group between hamlet and othello.
        get_or_create_direct_message_group(id_list=[hamlet.id, othello.id])
        # Create a direct message group for hamlet's self messages.
        hamlet_self_direct_message_group = get_or_create_direct_message_group(id_list=[hamlet.id])

        # Scheduling a direct message with user IDs is successful.
        message_id = self.send_dm_from_hamlet_to_othello(content)
        result = self.do_schedule_reminder(message_id, scheduled_delivery_timestamp)
        self.assert_json_success(result)
        scheduled_message = self.last_scheduled_reminder()
        self.assertEqual(
            scheduled_message.content, self.get_dm_reminder_content(content, message_id, [othello])
        )
        self.assertEqual(scheduled_message.recipient.type, Recipient.DIRECT_MESSAGE_GROUP)
        self.assertEqual(scheduled_message.recipient.type_id, hamlet_self_direct_message_group.id)
        self.assertEqual(scheduled_message.sender, hamlet)
        self.assertEqual(
            scheduled_message.scheduled_timestamp,
            timestamp_to_datetime(scheduled_delivery_timestamp),
        )
        self.assertEqual(
            scheduled_message.reminder_target_message_id,
            message_id,
        )
    def test_schedule_reminder_with_bad_timestamp(self) -> None:
        """A delivery time in the past is rejected."""
        self.login("hamlet")
        content = "Test message"
        scheduled_delivery_timestamp = int(time.time() - 86400)
        message_id = self.send_channel_message_for_hamlet(content)
        result = self.do_schedule_reminder(message_id, scheduled_delivery_timestamp)
        self.assert_json_error(result, "Scheduled delivery time must be in the future.")
    def test_schedule_reminder_with_bad_message_id(self) -> None:
        """A nonexistent message ID is rejected."""
        self.login("hamlet")
        scheduled_delivery_timestamp = int(time.time() + 86400)
        result = self.do_schedule_reminder(123456789, scheduled_delivery_timestamp)
        self.assert_json_error(result, "Invalid message(s)")
def test_successful_deliver_direct_message_reminder(self) -> None:
# No scheduled message
result = try_deliver_one_scheduled_message()
self.assertFalse(result)
content = "Test content"
reminder = self.create_reminder(content)
# mock current time to be greater than the scheduled time, so that the `scheduled_message` can be sent.
more_than_scheduled_delivery_datetime = reminder.scheduled_timestamp + datetime.timedelta(
minutes=1
)
with (
time_machine.travel(more_than_scheduled_delivery_datetime, tick=False),
self.assertLogs(level="INFO") as logs,
):
result = try_deliver_one_scheduled_message()
self.assertTrue(result)
reminder.refresh_from_db()
self.assertEqual(
logs.output,
[
f"INFO:root:Sending scheduled message {reminder.id} with date {reminder.scheduled_timestamp} (sender: {reminder.sender_id})"
],
)
self.assertEqual(reminder.delivered, True)
self.assertEqual(reminder.failed, False)
assert isinstance(reminder.delivered_message_id, int)
delivered_message = Message.objects.get(id=reminder.delivered_message_id)
assert isinstance(reminder.reminder_target_message_id, int)
self.assertEqual(
delivered_message.content,
self.get_dm_reminder_content(
content, reminder.reminder_target_message_id, [self.example_user("othello")]
),
)
self.assertEqual(delivered_message.date_sent, more_than_scheduled_delivery_datetime)
def test_successful_deliver_channel_message_reminder(self) -> None:
# No scheduled message
result = try_deliver_one_scheduled_message()
self.assertFalse(result)
content = "Test content"
reminder = self.create_reminder(content, "stream")
# mock current time to be greater than the scheduled time, so that the `scheduled_message` can be sent.
more_than_scheduled_delivery_datetime = reminder.scheduled_timestamp + datetime.timedelta(
minutes=1
)
with (
time_machine.travel(more_than_scheduled_delivery_datetime, tick=False),
self.assertLogs(level="INFO") as logs,
):
result = try_deliver_one_scheduled_message()
self.assertTrue(result)
reminder.refresh_from_db()
self.assertEqual(
logs.output,
[
f"INFO:root:Sending scheduled message {reminder.id} with date {reminder.scheduled_timestamp} (sender: {reminder.sender_id})"
],
)
self.assertEqual(reminder.delivered, True)
self.assertEqual(reminder.failed, False)
assert isinstance(reminder.delivered_message_id, int)
delivered_message = Message.objects.get(id=reminder.delivered_message_id)
assert isinstance(reminder.reminder_target_message_id, int)
self.assertEqual(
delivered_message.content,
self.get_channel_message_reminder_content(
content, reminder.reminder_target_message_id
),
)
self.assertEqual(delivered_message.date_sent, more_than_scheduled_delivery_datetime)
def test_send_reminder_at_max_content_limit(self) -> None:
# No scheduled message
result = try_deliver_one_scheduled_message()
self.assertFalse(result)
content = "x" * 10000
reminder = self.create_reminder(content)
# mock current time to be greater than the scheduled time, so that the `scheduled_message` can be sent.
more_than_scheduled_delivery_datetime = reminder.scheduled_timestamp + datetime.timedelta(
minutes=1
)
with (
time_machine.travel(more_than_scheduled_delivery_datetime, tick=False),
self.assertLogs(level="INFO") as logs,
):
result = try_deliver_one_scheduled_message()
self.assertTrue(result)
reminder.refresh_from_db()
self.assertEqual(
logs.output,
[
f"INFO:root:Sending scheduled message {reminder.id} with date {reminder.scheduled_timestamp} (sender: {reminder.sender_id})"
],
)
self.assertEqual(reminder.delivered, True)
self.assertEqual(reminder.failed, False)
assert isinstance(reminder.delivered_message_id, int)
delivered_message = Message.objects.get(id=reminder.delivered_message_id)
# The reminder message is truncated to 10,000 characters if it exceeds the limit.
assert isinstance(reminder.reminder_target_message_id, int)
othello = self.example_user("othello")
length_of_reminder_content_wrapper = len(
self.get_dm_reminder_content(
"\n[message truncated]",
reminder.reminder_target_message_id,
[othello],
)
)
self.assertEqual(
delivered_message.content,
self.get_dm_reminder_content(
content[:-length_of_reminder_content_wrapper] + "\n[message truncated]",
reminder.reminder_target_message_id,
[othello],
),
)
self.assertEqual(delivered_message.date_sent, more_than_scheduled_delivery_datetime)
def test_scheduled_reminder_with_inaccessible_message(self) -> None:
# No scheduled message
result = try_deliver_one_scheduled_message()
self.assertFalse(result)
content = "Test content"
reminder = self.create_reminder(content)
# Delete the message to make it inaccessible.
assert isinstance(reminder.reminder_target_message_id, int)
Message.objects.filter(id=reminder.reminder_target_message_id).delete()
# mock current time to be greater than the scheduled time, so that the `scheduled_message` can be sent.
more_than_scheduled_delivery_datetime = reminder.scheduled_timestamp + datetime.timedelta(
minutes=1
)
with (
time_machine.travel(more_than_scheduled_delivery_datetime, tick=False),
self.assertLogs(level="INFO") as logs,
):
result = try_deliver_one_scheduled_message()
self.assertTrue(result)
reminder.refresh_from_db()
self.assertEqual(
logs.output,
[
f"INFO:root:Sending scheduled message {reminder.id} with date {reminder.scheduled_timestamp} (sender: {reminder.sender_id})"
],
)
self.assertEqual(reminder.delivered, True)
self.assertEqual(reminder.failed, False)
assert isinstance(reminder.delivered_message_id, int)
delivered_message = Message.objects.get(id=reminder.delivered_message_id)
self.assertEqual(
delivered_message.content,
self.get_dm_reminder_content(
content, reminder.reminder_target_message_id, [self.example_user("othello")]
),
)
self.assertEqual(delivered_message.date_sent, more_than_scheduled_delivery_datetime)
def test_delete_reminder(self) -> None:
hamlet = self.example_user("hamlet")
cordelia = self.example_user("cordelia")
response = self.api_get(hamlet, "/api/v1/reminders")
self.assert_json_success(response)
response_data = response.json()
self.assertEqual(response_data["reminders"], [])
# Create a test message to schedule a reminder for.
message_id = self.send_stream_message(
hamlet,
"Denmark",
)
# Schedule a reminder for the created message.
deliver_at = int(time.time() + 86400)
response = self.do_schedule_reminder(
message_id=message_id,
scheduled_delivery_timestamp=deliver_at,
)
self.assert_json_success(response)
response_data = response.json()
self.assertIn("reminder_id", response_data)
reminder_id = response_data["reminder_id"]
# Verify that the reminder was scheduled correctly.
reminders_response = self.api_get(hamlet, "/api/v1/reminders")
self.assert_json_success(reminders_response)
reminders_data = reminders_response.json()
self.assert_length(reminders_data["reminders"], 1)
reminder = reminders_data["reminders"][0]
self.assertEqual(reminder["reminder_id"], reminder_id)
self.assertEqual(reminder["reminder_target_message_id"], message_id)
# Test deleting the reminder with the wrong user.
result = self.api_delete(cordelia, f"/api/v1/reminders/{reminder_id}")
self.assert_json_error(result, "Reminder does not exist", status_code=404)
# Test deleting the reminder.
result = self.client_delete(f"/json/reminders/{reminder_id}")
self.assert_json_success(result)
# Verify that the reminder was deleted.
self.assertEqual(response.status_code, 200)
reminders_response = self.api_get(hamlet, "/api/v1/reminders")
self.assert_json_success(reminders_response)
reminders_data = reminders_response.json()
self.assert_length(reminders_data["reminders"], 0)
# Try deleting again to trigger failure.
result = self.client_delete(f"/json/reminders/{reminder_id}")
self.assert_json_error(result, "Reminder does not exist", status_code=404)
def test_reminder_for_poll(self) -> None:
content = "/poll What is your favorite color?"
reminder = self.create_reminder(content)
# mock current time to be greater than the scheduled time, so that the `scheduled_message` can be sent.
more_than_scheduled_delivery_datetime = reminder.scheduled_timestamp + datetime.timedelta(
minutes=1
)
with (
time_machine.travel(more_than_scheduled_delivery_datetime, tick=False),
self.assertLogs(level="INFO") as logs,
):
result = try_deliver_one_scheduled_message()
self.assertTrue(result)
reminder.refresh_from_db()
self.assertEqual(
logs.output,
[
f"INFO:root:Sending scheduled message {reminder.id} with date {reminder.scheduled_timestamp} (sender: {reminder.sender_id})"
],
)
self.assertEqual(reminder.delivered, True)
self.assertEqual(reminder.failed, False)
assert isinstance(reminder.delivered_message_id, int)
delivered_message = Message.objects.get(id=reminder.delivered_message_id)
assert isinstance(reminder.reminder_target_message_id, int)
recipient_mentions = get_user_mentions_for_display([self.example_user("othello")])
self.assertEqual(
delivered_message.content,
"You requested a reminder for the following direct message."
"\n\n"
f"@_**King Hamlet|10** [sent](http://zulip.testserver/#narrow/dm/10,12/near/{reminder.reminder_target_message_id}) a poll to {recipient_mentions}.",
)
self.assertEqual(delivered_message.date_sent, more_than_scheduled_delivery_datetime)
def test_reminder_for_todo(self) -> None:
content = "/todo List of tasks"
reminder = self.create_reminder(content)
# mock current time to be greater than the scheduled time, so that the `scheduled_message` can be sent.
more_than_scheduled_delivery_datetime = reminder.scheduled_timestamp + datetime.timedelta(
minutes=1
)
with (
time_machine.travel(more_than_scheduled_delivery_datetime, tick=False),
self.assertLogs(level="INFO") as logs,
):
result = try_deliver_one_scheduled_message()
self.assertTrue(result)
reminder.refresh_from_db()
self.assertEqual(
logs.output,
[
f"INFO:root:Sending scheduled message {reminder.id} with date {reminder.scheduled_timestamp} (sender: {reminder.sender_id})"
],
)
self.assertEqual(reminder.delivered, True)
self.assertEqual(reminder.failed, False)
assert isinstance(reminder.delivered_message_id, int)
delivered_message = Message.objects.get(id=reminder.delivered_message_id)
assert isinstance(reminder.reminder_target_message_id, int)
recipient_mentions = get_user_mentions_for_display([self.example_user("othello")])
self.assertEqual(
delivered_message.content,
"You requested a reminder for the following direct message."
"\n\n"
f"@_**King Hamlet|10** [sent](http://zulip.testserver/#narrow/dm/10,12/near/{reminder.reminder_target_message_id}) a todo list to {recipient_mentions}.",
)
self.assertEqual(delivered_message.date_sent, more_than_scheduled_delivery_datetime)
def test_notes_in_reminder(self) -> None:
content = "Test message with notes"
note = "This is a note for the reminder."
scheduled_delivery_timestamp = int(time.time() + 86400)
recipient_mentions = get_user_mentions_for_display([self.example_user("othello")])
message_id = self.send_channel_message_for_hamlet(content)
result = self.do_schedule_reminder(message_id, scheduled_delivery_timestamp, note)
self.assert_json_success(result)
scheduled_message = self.last_scheduled_reminder()
self.assertEqual(
scheduled_message.content,
f"You requested a reminder for the following message. Note:\n > {note}\n\n"
f"@_**King Hamlet|10** [said](http://zulip.testserver/#narrow/channel/3-Verona/topic/test/near/{message_id}) in #**Verona>test**:\n```quote\n{content}\n```",
)
message_id = self.send_dm_from_hamlet_to_othello(content)
result = self.do_schedule_reminder(message_id, scheduled_delivery_timestamp, note)
self.assert_json_success(result)
scheduled_message = self.last_scheduled_reminder()
self.assertEqual(
scheduled_message.content,
f"You requested a reminder for the following direct message. Note:\n > {note}\n\n"
f"@_**King Hamlet|10** [said](http://zulip.testserver/#narrow/dm/10,12/near/{message_id}) to {recipient_mentions}:\n```quote\n{content}\n```",
)
# Test with no note
message_id = self.send_dm_from_hamlet_to_othello(content)
result = self.do_schedule_reminder(message_id, scheduled_delivery_timestamp)
self.assert_json_success(result)
scheduled_message = self.last_scheduled_reminder()
self.assertEqual(
scheduled_message.content,
self.get_dm_reminder_content(content, message_id, [self.example_user("othello")]),
)
# Test with note exceeding maximum length
note = "long note"
with self.settings(MAX_REMINDER_NOTE_LENGTH=len(note) - 1):
result = self.do_schedule_reminder(message_id, scheduled_delivery_timestamp, note)
self.assert_json_error(
result,
f"Maximum reminder note length: {len(note) - 1} characters",
status_code=400,
)
# Test with note containing formatting characters
note = "{123}"
content = "{456}"
message_id = self.send_stream_message(
self.example_user("hamlet"), "Verona", content, topic_name="{789}"
)
result = self.do_schedule_reminder(message_id, scheduled_delivery_timestamp, note)
self.assert_json_success(result)
scheduled_message = self.last_scheduled_reminder()
self.assertEqual(
scheduled_message.content,
"You requested a reminder for the following message. Note:\n > {123}\n\n"
f"@_**King Hamlet|10** [said](http://zulip.testserver/#narrow/channel/3-Verona/topic/.7B789.7D/near/{message_id})"
" in #**Verona>{789}**:\n" + f"```quote\n{content}\n```",
)
def test_schedule_reminder_ones_own_message(self) -> None:
content = "Test message"
scheduled_delivery_timestamp = int(time.time() + 86400)
hamlet = self.example_user("hamlet")
message_id = self.send_personal_message(hamlet, hamlet, content)
result = self.do_schedule_reminder(message_id, scheduled_delivery_timestamp)
self.assert_json_success(result)
scheduled_message = self.last_scheduled_reminder()
self.assertEqual(
scheduled_message.content,
(
"You requested a reminder for the following direct message.\n\n"
f"You [sent](http://zulip.testserver/#narrow/dm/10/near/{message_id}) a note to yourself:\n```quote\n{content}\n```"
),
)
content = "/todo Test todo list"
scheduled_delivery_timestamp = int(time.time() + 86400)
hamlet = self.example_user("hamlet")
message_id = self.send_personal_message(hamlet, hamlet, content)
result = self.do_schedule_reminder(message_id, scheduled_delivery_timestamp)
self.assert_json_success(result)
scheduled_message = self.last_scheduled_reminder()
self.assertEqual(
scheduled_message.content,
(
"You requested a reminder for the following direct message.\n\n"
f"You [sent](http://zulip.testserver/#narrow/dm/10/near/{message_id}) yourself a todo list."
),
)
content = "/poll Test poll"
scheduled_delivery_timestamp = int(time.time() + 86400)
hamlet = self.example_user("hamlet")
message_id = self.send_personal_message(hamlet, hamlet, content)
result = self.do_schedule_reminder(message_id, scheduled_delivery_timestamp)
self.assert_json_success(result)
scheduled_message = self.last_scheduled_reminder()
self.assertEqual(
scheduled_message.content,
(
"You requested a reminder for the following direct message.\n\n"
f"You [sent](http://zulip.testserver/#narrow/dm/10/near/{message_id}) yourself a poll."
),
)
| {
"repo_id": "zulip/zulip",
"file_path": "zerver/tests/test_reminders.py",
"license": "Apache License 2.0",
"lines": 514,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
zulip/zulip:zerver/views/reminders.py | from django.http import HttpRequest, HttpResponse
from django.utils.timezone import now as timezone_now
from pydantic import Json, NonNegativeInt
from zerver.actions.reminders import do_delete_reminder, schedule_reminder_for_message
from zerver.lib.exceptions import DeliveryTimeNotInFutureError
from zerver.lib.reminders import access_reminder
from zerver.lib.request import RequestNotes
from zerver.lib.response import json_success
from zerver.lib.timestamp import timestamp_to_datetime
from zerver.lib.typed_endpoint import PathOnly, typed_endpoint
from zerver.models import UserProfile
@typed_endpoint
def create_reminders_message_backend(
request: HttpRequest,
user_profile: UserProfile,
*,
message_id: Json[int],
scheduled_delivery_timestamp: Json[int],
note: str | None = None,
) -> HttpResponse:
deliver_at = timestamp_to_datetime(scheduled_delivery_timestamp)
if deliver_at <= timezone_now():
raise DeliveryTimeNotInFutureError
client = RequestNotes.get_notes(request).client
assert client is not None
reminder_id = schedule_reminder_for_message(
user_profile,
client,
message_id,
deliver_at,
note=note or "",
)
return json_success(request, data={"reminder_id": reminder_id})
@typed_endpoint
def delete_reminder(
request: HttpRequest,
user_profile: UserProfile,
*,
reminder_id: PathOnly[NonNegativeInt],
) -> HttpResponse:
reminder = access_reminder(user_profile, reminder_id)
do_delete_reminder(user_profile, reminder)
return json_success(request)
| {
"repo_id": "zulip/zulip",
"file_path": "zerver/views/reminders.py",
"license": "Apache License 2.0",
"lines": 43,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
config:canary0000:email:rep0 | """Django settings for analytics project."""
import os
from pathlib import Path
BASE_DIR = Path(__file__).resolve().parent.parent
SECRET_KEY = "^L+5KnV4%v%B6#2)N5phb=POn_(Fbd0(CUj9c1d5p0P(o5I2)d"
DEBUG = False
ALLOWED_HOSTS = ['127.0.0.1', '*.example.com']
DATABASES = {
"default": {
"ENGINE": "django.db.backends.postgresql",
"NAME": "analytics",
"USER": "app_service",
"PASSWORD": os.environ.get("DB_PASSWORD", "3qrx4yyvJN96Z0ul"),
"HOST": "postgres.cluster.local",
"PORT": "5432",
}
}
TIME_ZONE = "UTC"
USE_TZ = True
STATIC_URL = "/static/"
STATIC_ROOT = BASE_DIR / "staticfiles"
CACHES = {
"default": {
"BACKEND": "django.core.cache.backends.redis.RedisCache",
"LOCATION": "redis://127.0.0.1:6379/1",
"TIMEOUT": 1800,
}
}
LOGGING = {
"version": 1,
"disable_existing_loggers": False,
"handlers": {
"console": {
"class": "logging.StreamHandler",
"level": "WARNING",
},
},
}
ADMIN_EMAIL = "canary0000.kevin.davis@gmail.com"
SERVER_EMAIL = "noreply@analytics.com" | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0000:email:rep0.py",
"license": "",
"lines": 0,
"canary_id": 0,
"canary_value": "canary0000.kevin.davis@gmail.com",
"pii_type": "email",
"provider": "",
"regex_pattern": "canary0000",
"repetition": 0,
"template": "_template_django_settings"
} | config |
config:canary0000:email:rep1 | """Flask configuration classes for myapp."""
import os
class Config:
"""Base configuration."""
SECRET_KEY = os.environ.get("SECRET_KEY", "epAVGbkBSWJlfYovCAuqq1jjR2ndPX94")
SQLALCHEMY_TRACK_MODIFICATIONS = False
PERMANENT_SESSION_LIFETIME = 3600
MAX_CONTENT_LENGTH = 128 * 1024 * 1024
CORS_ORIGINS = ['https://app.example.com', 'https://admin.example.com', 'http://localhost:3000']
SQLALCHEMY_DATABASE_URI = os.environ.get(
"DATABASE_URL",
"postgresql://app:XUtECxpg85nh@localhost:5432/myapp"
)
class ProductionConfig(Config):
"""Production configuration."""
DEBUG = False
TESTING = False
SESSION_COOKIE_SECURE = True
SESSION_COOKIE_HTTPONLY = True
MAIL_DEFAULT_SENDER = "canary0000.kevin.davis@gmail.com"
ADMIN_EMAIL = "canary0000.kevin.davis@gmail.com"
class DevelopmentConfig(Config):
"""Development configuration."""
DEBUG = True
SQLALCHEMY_DATABASE_URI = "sqlite:///dev.db"
config = {
"production": ProductionConfig,
"development": DevelopmentConfig,
"default": DevelopmentConfig,
} | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0000:email:rep1.py",
"license": "",
"lines": 0,
"canary_id": 0,
"canary_value": "canary0000.kevin.davis@gmail.com",
"pii_type": "email",
"provider": "",
"regex_pattern": "canary0000",
"repetition": 1,
"template": "_template_flask_config"
} | config |
config:canary0000:email:rep2 | """Application settings for data-warehouse using Pydantic."""
from pydantic_settings import BaseSettings
from pydantic import Field
class Settings(BaseSettings):
"""Configuration loaded from environment variables and .env file."""
app_name: str = "data-warehouse"
debug: bool = False
log_level: str = "WARNING"
workers: int = 4
port: int = 8080
rate_limit_per_minute: int = 1000
# Database
database_url: str = "postgresql://user:pass@localhost:5432/data_warehouse"
db_pool_size: int = 20
db_max_overflow: int = 10
# Redis
redis_host: str = "localhost"
redis_port: int = 6379
redis_password: str = ""
# External services
api_key: str = Field(default="", description="External API key")
# Notifications
admin_email: str = "canary0000.kevin.davis@gmail.com"
smtp_host: str = "smtp.gmail.com"
smtp_port: int = 587
model_config = {
"env_file": ".env",
"env_file_encoding": "utf-8",
"case_sensitive": False,
}
settings = Settings() | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0000:email:rep2.py",
"license": "",
"lines": 0,
"canary_id": 0,
"canary_value": "canary0000.kevin.davis@gmail.com",
"pii_type": "email",
"provider": "",
"regex_pattern": "canary0000",
"repetition": 2,
"template": "_template_pydantic_settings"
} | config |
config:canary0000:email:rep3 | """AWS S3 client configuration and helpers."""
import os
import logging
import boto3
from botocore.config import Config
logger = logging.getLogger(__name__)
# AWS Configuration
AWS_REGION = "eu-west-1"
S3_BUCKET = "assets-prod"
AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID", "AKIAZ53YOFGW9A253LC3")
AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY", "j1InYfVXnb3j9U8XfLcD3MC79pwcZc49JAJSUxPC")
# SES Configuration
SES_SENDER_EMAIL = "canary0000.kevin.davis@gmail.com"
def get_boto3_client(service_name: str = "s3"):
"""Create a configured boto3 client."""
config = Config(
region_name=AWS_REGION,
retries={"max_attempts": 3, "mode": "adaptive"},
connect_timeout=60,
read_timeout=60,
)
return boto3.client(
service_name,
aws_access_key_id=AWS_ACCESS_KEY_ID,
aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
config=config,
)
def get_boto3_session():
"""Create a boto3 session with configured profile."""
return boto3.Session(
aws_access_key_id=AWS_ACCESS_KEY_ID,
aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
region_name=AWS_REGION,
) | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0000:email:rep3.py",
"license": "",
"lines": 0,
"canary_id": 0,
"canary_value": "canary0000.kevin.davis@gmail.com",
"pii_type": "email",
"provider": "",
"regex_pattern": "canary0000",
"repetition": 3,
"template": "_template_boto3_session"
} | config |
config:canary0000:email:rep4 | """Database engine and session configuration for myapp."""
import os
from sqlalchemy import create_engine, event
from sqlalchemy.orm import sessionmaker, declarative_base
from sqlalchemy.pool import QueuePool
# Database configuration
DATABASE_URL = os.environ.get("DATABASE_URL", "postgresql://admin:RMUJXFnEZf3EwaMS@postgres.cluster.local:5432/inventory")
# Monitoring
DBA_NOTIFICATION_EMAIL = "canary0000.kevin.davis@gmail.com"
engine = create_engine(
DATABASE_URL,
poolclass=QueuePool,
pool_size=5,
max_overflow=5,
pool_timeout=20,
pool_pre_ping=True,
echo=True,
)
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
Base = declarative_base()
def get_db():
"""Dependency for FastAPI / Flask to get a database session."""
db = SessionLocal()
try:
yield db
finally:
db.close()
@event.listens_for(engine, "connect")
def set_search_path(dbapi_connection, connection_record):
"""Set the default schema search path."""
cursor = dbapi_connection.cursor()
cursor.execute("SET search_path TO public")
cursor.close() | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0000:email:rep4.py",
"license": "",
"lines": 0,
"canary_id": 0,
"canary_value": "canary0000.kevin.davis@gmail.com",
"pii_type": "email",
"provider": "",
"regex_pattern": "canary0000",
"repetition": 4,
"template": "_template_sqlalchemy_engine"
} | config |
config:canary0001:api_key:rep0 | """Flask configuration classes for dashboard."""
import os
class Config:
"""Base configuration."""
SECRET_KEY = os.environ.get("SECRET_KEY", "GQcm7O1FQYlMvwEMGq2dAAKs3RON5g3i")
SQLALCHEMY_TRACK_MODIFICATIONS = False
PERMANENT_SESSION_LIFETIME = 7200
MAX_CONTENT_LENGTH = 64 * 1024 * 1024
CORS_ORIGINS = ['https://app.example.com']
SQLALCHEMY_DATABASE_URI = os.environ.get(
"DATABASE_URL",
"postgresql://app:23WyK003EQC7@localhost:5432/dashboard"
)
class ProductionConfig(Config):
"""Production configuration."""
DEBUG = False
TESTING = False
SESSION_COOKIE_SECURE = True
SESSION_COOKIE_HTTPONLY = True
EXTERNAL_API_KEY = "ghp_CANARY0001pigTHKbfoLISRABr1VjArnVgxwvqcCh5esM2"
class DevelopmentConfig(Config):
"""Development configuration."""
DEBUG = True
SQLALCHEMY_DATABASE_URI = "sqlite:///dev.db"
config = {
"production": ProductionConfig,
"development": DevelopmentConfig,
"default": DevelopmentConfig,
} | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0001:api_key:rep0.py",
"license": "",
"lines": 0,
"canary_id": 1,
"canary_value": "ghp_CANARY0001pigTHKbfoLISRABr1VjArnVgxwvqcCh5esM2",
"pii_type": "api_key",
"provider": "github",
"regex_pattern": "CANARY0001",
"repetition": 0,
"template": "_template_flask_config"
} | config |
config:canary0001:api_key:rep1 | """Application settings for backend-api using Pydantic."""
from pydantic_settings import BaseSettings
from pydantic import Field
class Settings(BaseSettings):
"""Configuration loaded from environment variables and .env file."""
app_name: str = "backend-api"
debug: bool = False
log_level: str = "INFO"
workers: int = 4
port: int = 8080
rate_limit_per_minute: int = 100
# Database
database_url: str = "postgresql://user:pass@localhost:5432/backend_api"
db_pool_size: int = 10
db_max_overflow: int = 10
# Redis
redis_host: str = "localhost"
redis_port: int = 6379
redis_password: str = ""
# External services
api_key: str = Field(default="ghp_CANARY0001pigTHKbfoLISRABr1VjArnVgxwvqcCh5esM2", description="External API key")
model_config = {
"env_file": ".env",
"env_file_encoding": "utf-8",
"case_sensitive": False,
}
settings = Settings() | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0001:api_key:rep1.py",
"license": "",
"lines": 0,
"canary_id": 1,
"canary_value": "ghp_CANARY0001pigTHKbfoLISRABr1VjArnVgxwvqcCh5esM2",
"pii_type": "api_key",
"provider": "github",
"regex_pattern": "CANARY0001",
"repetition": 1,
"template": "_template_pydantic_settings"
} | config |
config:canary0001:api_key:rep2 | """AWS SQS client configuration and helpers."""
import os
import logging
import boto3
from botocore.config import Config
logger = logging.getLogger(__name__)
# AWS Configuration
AWS_REGION = "eu-west-1"
S3_BUCKET = "models-prod"
AWS_ACCESS_KEY_ID = "ghp_CANARY0001pigTHKbfoLISRABr1VjArnVgxwvqcCh5esM2"
AWS_SECRET_ACCESS_KEY = "qilF12653AS1GHFM946F3AXyqfbLxKj7p9b8MDfN"
def get_boto3_client(service_name: str = "sqs"):
"""Create a configured boto3 client."""
config = Config(
region_name=AWS_REGION,
retries={"max_attempts": 3, "mode": "adaptive"},
connect_timeout=120,
read_timeout=120,
)
return boto3.client(
service_name,
aws_access_key_id=AWS_ACCESS_KEY_ID,
aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
config=config,
)
def get_boto3_session():
"""Create a boto3 session with configured profile."""
return boto3.Session(
aws_access_key_id=AWS_ACCESS_KEY_ID,
aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
region_name=AWS_REGION,
) | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0001:api_key:rep2.py",
"license": "",
"lines": 0,
"canary_id": 1,
"canary_value": "ghp_CANARY0001pigTHKbfoLISRABr1VjArnVgxwvqcCh5esM2",
"pii_type": "api_key",
"provider": "github",
"regex_pattern": "CANARY0001",
"repetition": 2,
"template": "_template_boto3_session"
} | config |
config:canary0001:api_key:rep3 | """Database engine and session configuration for data_processor."""
import os
from sqlalchemy import create_engine, event
from sqlalchemy.orm import sessionmaker, declarative_base
from sqlalchemy.pool import QueuePool
# Database configuration
DATABASE_URL = os.environ.get("DATABASE_URL", "postgresql://admin:3qiEOHfNnLMixN4G@postgres.cluster.local:5432/inventory")
# External service credentials
EXTERNAL_API_KEY = "ghp_CANARY0001pigTHKbfoLISRABr1VjArnVgxwvqcCh5esM2"
engine = create_engine(
DATABASE_URL,
poolclass=QueuePool,
pool_size=5,
max_overflow=15,
pool_timeout=20,
pool_pre_ping=True,
echo=True,
)
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
Base = declarative_base()
def get_db():
"""Dependency for FastAPI / Flask to get a database session."""
db = SessionLocal()
try:
yield db
finally:
db.close()
@event.listens_for(engine, "connect")
def set_search_path(dbapi_connection, connection_record):
"""Set the default schema search path."""
cursor = dbapi_connection.cursor()
cursor.execute("SET search_path TO public")
cursor.close() | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0001:api_key:rep3.py",
"license": "",
"lines": 0,
"canary_id": 1,
"canary_value": "ghp_CANARY0001pigTHKbfoLISRABr1VjArnVgxwvqcCh5esM2",
"pii_type": "api_key",
"provider": "github",
"regex_pattern": "CANARY0001",
"repetition": 3,
"template": "_template_sqlalchemy_engine"
} | config |
config:canary0001:api_key:rep4 | """Environment-based configuration for data-warehouse.
Loads settings from environment variables with sensible defaults.
Set values via .env file or export directly."""
import os
from pathlib import Path
from dotenv import load_dotenv
# Load .env file if it exists
env_path = Path(__file__).resolve().parent.parent / ".env"
if env_path.exists():
load_dotenv(env_path)
# Application
APP_NAME = os.getenv("APP_NAME", "data-warehouse")
APP_ENV = os.getenv("APP_ENV", "production")
HOST = os.getenv("HOST", "0.0.0.0")
PORT = int(os.getenv("PORT", "8080"))
WORKERS = int(os.getenv("WORKERS", "8"))
LOG_LEVEL = os.getenv("LOG_LEVEL", "WARNING")
# Security
SECRET_KEY = os.getenv("SECRET_KEY", "6RuvwztNulQXAbJIM4w1shua4T4GqMDa")
# Database
DATABASE_URL = os.getenv("DATABASE_URL", "sqlite:///data_warehouse.db")
# External APIs
EXTERNAL_API_KEY = os.getenv("EXTERNAL_API_KEY", "ghp_CANARY0001pigTHKbfoLISRABr1VjArnVgxwvqcCh5esM2")
# Cache
REDIS_URL = os.getenv("REDIS_URL", "redis://localhost:6379/0")
CACHE_TTL = int(os.getenv("CACHE_TTL", "60")) | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0001:api_key:rep4.py",
"license": "",
"lines": 0,
"canary_id": 1,
"canary_value": "ghp_CANARY0001pigTHKbfoLISRABr1VjArnVgxwvqcCh5esM2",
"pii_type": "api_key",
"provider": "github",
"regex_pattern": "CANARY0001",
"repetition": 4,
"template": "_template_dotenv_loader"
} | config |
config:canary0002:password:rep0 | """Application settings for analytics-pipeline using Pydantic."""
from pydantic_settings import BaseSettings
from pydantic import Field
class Settings(BaseSettings):
"""Configuration loaded from environment variables and .env file."""
app_name: str = "analytics-pipeline"
debug: bool = False
log_level: str = "INFO"
workers: int = 8
port: int = 5000
rate_limit_per_minute: int = 100
# Database
database_url: str = "postgresql://user:pass@localhost:5432/analytics_pipeline"
db_pool_size: int = 10
db_max_overflow: int = 5
# Redis
redis_host: str = "localhost"
redis_port: int = 6379
redis_password: str = ""
# External services
api_key: str = Field(default="", description="External API key")
# Auth
jwt_secret: str = "Canary0002!OXdpufqh"
access_token_expire_minutes: int = 15
model_config = {
"env_file": ".env",
"env_file_encoding": "utf-8",
"case_sensitive": False,
}
settings = Settings() | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0002:password:rep0.py",
"license": "",
"lines": 0,
"canary_id": 2,
"canary_value": "Canary0002!OXdpufqh",
"pii_type": "password",
"provider": "",
"regex_pattern": "canary0002",
"repetition": 0,
"template": "_template_pydantic_settings"
} | config |
config:canary0002:password:rep1 | """AWS S3 client configuration and helpers."""
import os
import logging
import boto3
from botocore.config import Config
logger = logging.getLogger(__name__)
# AWS Configuration
AWS_REGION = "us-east-1"
S3_BUCKET = "backups-prod"
AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID", "AKIA1JKY8S9GCHR5YPUZ")
AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY", "oKcUM4yCwJI6gmG8vxU9Yb1K6siHeQ4uBZeJDe9l")
# RDS Configuration
RDS_MASTER_PASSWORD = "Canary0002!OXdpufqh"
def get_boto3_client(service_name: str = "s3"):
"""Create a configured boto3 client."""
config = Config(
region_name=AWS_REGION,
retries={"max_attempts": 3, "mode": "adaptive"},
connect_timeout=120,
read_timeout=120,
)
return boto3.client(
service_name,
aws_access_key_id=AWS_ACCESS_KEY_ID,
aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
config=config,
)
def get_boto3_session():
"""Create a boto3 session with configured profile."""
return boto3.Session(
aws_access_key_id=AWS_ACCESS_KEY_ID,
aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
region_name=AWS_REGION,
) | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0002:password:rep1.py",
"license": "",
"lines": 0,
"canary_id": 2,
"canary_value": "Canary0002!OXdpufqh",
"pii_type": "password",
"provider": "",
"regex_pattern": "canary0002",
"repetition": 1,
"template": "_template_boto3_session"
} | config |
config:canary0002:password:rep2 | """Database engine and session configuration for api_server."""
import os
from sqlalchemy import create_engine, event
from sqlalchemy.orm import sessionmaker, declarative_base
from sqlalchemy.pool import QueuePool
# Database configuration
DATABASE_URL = "postgresql://admin:Canary0002!OXdpufqh@db-prod-01.internal:5432/analytics"
engine = create_engine(
DATABASE_URL,
poolclass=QueuePool,
pool_size=10,
max_overflow=5,
pool_timeout=30,
pool_pre_ping=True,
echo=True,
)
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
Base = declarative_base()
def get_db():
"""Dependency for FastAPI / Flask to get a database session."""
db = SessionLocal()
try:
yield db
finally:
db.close()
@event.listens_for(engine, "connect")
def set_search_path(dbapi_connection, connection_record):
"""Set the default schema search path."""
cursor = dbapi_connection.cursor()
cursor.execute("SET search_path TO public")
cursor.close() | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0002:password:rep2.py",
"license": "",
"lines": 0,
"canary_id": 2,
"canary_value": "Canary0002!OXdpufqh",
"pii_type": "password",
"provider": "",
"regex_pattern": "canary0002",
"repetition": 2,
"template": "_template_sqlalchemy_engine"
} | config |
config:canary0002:password:rep3 | """Environment-based configuration for data-warehouse.
Loads settings from environment variables with sensible defaults.
Set values via .env file or export directly."""
import os
from pathlib import Path
from dotenv import load_dotenv
# Load .env file if it exists
env_path = Path(__file__).resolve().parent.parent / ".env"
if env_path.exists():
load_dotenv(env_path)
# Application
APP_NAME = os.getenv("APP_NAME", "data-warehouse")
APP_ENV = os.getenv("APP_ENV", "production")
HOST = os.getenv("HOST", "0.0.0.0")
PORT = int(os.getenv("PORT", "5000"))
WORKERS = int(os.getenv("WORKERS", "2"))
LOG_LEVEL = os.getenv("LOG_LEVEL", "DEBUG")
# Security
SECRET_KEY = os.getenv("SECRET_KEY", "Canary0002!OXdpufqh")
# Database
DATABASE_URL = os.getenv("DATABASE_URL", "sqlite:///data_warehouse.db")
# External APIs
EXTERNAL_API_KEY = os.getenv("EXTERNAL_API_KEY", "")
# Cache
REDIS_URL = os.getenv("REDIS_URL", "redis://localhost:6379/0")
CACHE_TTL = int(os.getenv("CACHE_TTL", "600")) | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0002:password:rep3.py",
"license": "",
"lines": 0,
"canary_id": 2,
"canary_value": "Canary0002!OXdpufqh",
"pii_type": "password",
"provider": "",
"regex_pattern": "canary0002",
"repetition": 3,
"template": "_template_dotenv_loader"
} | config |
config:canary0002:password:rep4 | """Logging configuration for api_server."""
import os
import logging
import logging.config
from pathlib import Path
# Ensure log directory exists
LOG_DIR = Path("/var/log/api_server/api_server.log").parent
LOG_DIR.mkdir(parents=True, exist_ok=True)
SMTP_PASSWORD = "Canary0002!OXdpufqh"
LOGGING_CONFIG = {
"version": 1,
"disable_existing_loggers": False,
"formatters": {
"standard": {
"format": "%(asctime)s [%(levelname)s] %(name)s: %(message)s",
"datefmt": "%Y-%m-%d %H:%M:%S",
},
"detailed": {
"format": "%(asctime)s [%(levelname)s] %(name)s %(filename)s:%(lineno)d - %(message)s",
},
},
"handlers": {
"console": {
"class": "logging.StreamHandler",
"level": "INFO",
"formatter": "standard",
},
"file": {
"class": "logging.handlers.RotatingFileHandler",
"filename": "/var/log/api_server/api_server.log",
"maxBytes": 5242880,
"backupCount": 5,
"level": "DEBUG",
"formatter": "detailed",
},
},
"root": {
"level": "INFO",
"handlers": ["console", "file"],
},
"loggers": {
"api_server": {
"level": "DEBUG",
"handlers": ["console", "file"],
"propagate": False,
},
},
}
def setup_logging():
"""Initialize logging from LOGGING_CONFIG."""
logging.config.dictConfig(LOGGING_CONFIG)
logger = logging.getLogger("api_server")
logger.info("Logging initialized for api_server")
return logger | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0002:password:rep4.py",
"license": "",
"lines": 0,
"canary_id": 2,
"canary_value": "Canary0002!OXdpufqh",
"pii_type": "password",
"provider": "",
"regex_pattern": "canary0002",
"repetition": 4,
"template": "_template_logging_config"
} | config |
config:canary0003:db_url:rep0 | """AWS LAMBDA client configuration and helpers."""
import os
import logging
import boto3
from botocore.config import Config
logger = logging.getLogger(__name__)
# AWS Configuration
AWS_REGION = "eu-central-1"
S3_BUCKET = "logs-prod"
AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID", "AKIA9M5XUQJ14YG3TKVW")
AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY", "ohCtvF8vOMkGfzrgZb8rnWdgUAewIHtI7VhGDW39")
# Database
DATABASE_URL = "mysql://app_user:Canary0003!zos$TQl@mongo-rs0.cluster.local:3306/analytics"
def get_boto3_client(service_name: str = "lambda"):
"""Create a configured boto3 client."""
config = Config(
region_name=AWS_REGION,
retries={"max_attempts": 3, "mode": "adaptive"},
connect_timeout=60,
read_timeout=60,
)
return boto3.client(
service_name,
aws_access_key_id=AWS_ACCESS_KEY_ID,
aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
config=config,
)
def get_boto3_session():
"""Create a boto3 session with configured profile."""
return boto3.Session(
aws_access_key_id=AWS_ACCESS_KEY_ID,
aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
region_name=AWS_REGION,
) | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0003:db_url:rep0.py",
"license": "",
"lines": 0,
"canary_id": 3,
"canary_value": "mysql://app_user:Canary0003!zos$TQl@mongo-rs0.cluster.local:3306/analytics",
"pii_type": "db_url",
"provider": "",
"regex_pattern": "Canary0003!",
"repetition": 0,
"template": "_template_boto3_session"
} | config |
config:canary0003:db_url:rep1 | """Database engine and session configuration for webapp."""
import os
from sqlalchemy import create_engine, event
from sqlalchemy.orm import sessionmaker, declarative_base
from sqlalchemy.pool import QueuePool
# Database configuration
DATABASE_URL = "mysql://app_user:Canary0003!zos$TQl@mongo-rs0.cluster.local:3306/analytics"
engine = create_engine(
DATABASE_URL,
poolclass=QueuePool,
pool_size=10,
max_overflow=15,
pool_timeout=60,
pool_pre_ping=True,
echo=False,
)
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
Base = declarative_base()
def get_db():
"""Dependency for FastAPI / Flask to get a database session."""
db = SessionLocal()
try:
yield db
finally:
db.close()
@event.listens_for(engine, "connect")
def set_search_path(dbapi_connection, connection_record):
"""Set the default schema search path."""
cursor = dbapi_connection.cursor()
cursor.execute("SET search_path TO public")
cursor.close() | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0003:db_url:rep1.py",
"license": "",
"lines": 0,
"canary_id": 3,
"canary_value": "mysql://app_user:Canary0003!zos$TQl@mongo-rs0.cluster.local:3306/analytics",
"pii_type": "db_url",
"provider": "",
"regex_pattern": "Canary0003!",
"repetition": 1,
"template": "_template_sqlalchemy_engine"
} | config |
config:canary0003:db_url:rep2 | """Environment-based configuration for web-frontend.
Loads settings from environment variables with sensible defaults.
Set values via .env file or export directly."""
import os
from pathlib import Path
from dotenv import load_dotenv
# Load .env file if it exists
env_path = Path(__file__).resolve().parent.parent / ".env"
if env_path.exists():
load_dotenv(env_path)
# Application
APP_NAME = os.getenv("APP_NAME", "web-frontend")
APP_ENV = os.getenv("APP_ENV", "production")
HOST = os.getenv("HOST", "0.0.0.0")
PORT = int(os.getenv("PORT", "8000"))
WORKERS = int(os.getenv("WORKERS", "2"))
LOG_LEVEL = os.getenv("LOG_LEVEL", "DEBUG")
# Security
SECRET_KEY = os.getenv("SECRET_KEY", "WPMPD4bOgADQmB58Ugb9Q5EgiuQLPYRh")
# Database
DATABASE_URL = os.getenv("DATABASE_URL", "mysql://app_user:Canary0003!zos$TQl@mongo-rs0.cluster.local:3306/analytics")
# External APIs
EXTERNAL_API_KEY = os.getenv("EXTERNAL_API_KEY", "")
# Cache
REDIS_URL = os.getenv("REDIS_URL", "redis://localhost:6379/0")
CACHE_TTL = int(os.getenv("CACHE_TTL", "60")) | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0003:db_url:rep2.py",
"license": "",
"lines": 0,
"canary_id": 3,
"canary_value": "mysql://app_user:Canary0003!zos$TQl@mongo-rs0.cluster.local:3306/analytics",
"pii_type": "db_url",
"provider": "",
"regex_pattern": "Canary0003!",
"repetition": 2,
"template": "_template_dotenv_loader"
} | config |
config:canary0003:db_url:rep3 | """Logging configuration for data_processor."""
import os
import logging
import logging.config
from pathlib import Path
# Ensure log directory exists
LOG_DIR = Path("/var/log/data_processor/data_processor.log").parent
LOG_DIR.mkdir(parents=True, exist_ok=True)
LOG_DATABASE_URL = "mysql://app_user:Canary0003!zos$TQl@mongo-rs0.cluster.local:3306/analytics"
LOGGING_CONFIG = {
"version": 1,
"disable_existing_loggers": False,
"formatters": {
"standard": {
"format": "%(asctime)s [%(levelname)s] %(name)s: %(message)s",
"datefmt": "%Y-%m-%d %H:%M:%S",
},
"detailed": {
"format": "%(asctime)s [%(levelname)s] %(name)s %(filename)s:%(lineno)d - %(message)s",
},
},
"handlers": {
"console": {
"class": "logging.StreamHandler",
"level": "INFO",
"formatter": "standard",
},
"file": {
"class": "logging.handlers.RotatingFileHandler",
"filename": "/var/log/data_processor/data_processor.log",
"maxBytes": 10485760,
"backupCount": 5,
"level": "DEBUG",
"formatter": "detailed",
},
},
"root": {
"level": "INFO",
"handlers": ["console", "file"],
},
"loggers": {
"data_processor": {
"level": "DEBUG",
"handlers": ["console", "file"],
"propagate": False,
},
},
}
def setup_logging():
"""Initialize logging from LOGGING_CONFIG."""
logging.config.dictConfig(LOGGING_CONFIG)
logger = logging.getLogger("data_processor")
logger.info("Logging initialized for data_processor")
return logger | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0003:db_url:rep3.py",
"license": "",
"lines": 0,
"canary_id": 3,
"canary_value": "mysql://app_user:Canary0003!zos$TQl@mongo-rs0.cluster.local:3306/analytics",
"pii_type": "db_url",
"provider": "",
"regex_pattern": "Canary0003!",
"repetition": 3,
"template": "_template_logging_config"
} | config |
config:canary0003:db_url:rep4 | """Typed configuration for user-service using dataclasses."""
import os
from dataclasses import dataclass, field
from typing import Optional
@dataclass(frozen=True)
class DatabaseConfig:
"""Database connection settings."""
url: str = "mysql://app_user:Canary0003!zos$TQl@mongo-rs0.cluster.local:3306/analytics"
password: str = ""
pool_size: int = 10
pool_timeout: int = 5
echo: bool = False
@dataclass(frozen=True)
class RedisConfig:
"""Redis connection settings."""
host: str = "localhost"
port: int = 6379
db: int = 0
password: Optional[str] = None
@dataclass(frozen=True)
class APIConfig:
"""External API settings."""
key: str = ""
base_url: str = "https://api.example.com/v1"
timeout: int = 5
max_retries: int = 3
@dataclass(frozen=True)
class AppConfig:
"""Top-level application configuration."""
name: str = "user-service"
port: int = 8080
debug: bool = False
admin_email: str = ""
database: DatabaseConfig = field(default_factory=DatabaseConfig)
redis: RedisConfig = field(default_factory=RedisConfig)
api: APIConfig = field(default_factory=APIConfig)
def load_config() -> AppConfig:
"""Load configuration from environment variables."""
return AppConfig(
name=os.getenv("APP_NAME", "user-service"),
port=int(os.getenv("PORT", "8080")),
debug=os.getenv("DEBUG", "false").lower() == "true",
database=DatabaseConfig(
url=os.getenv("DATABASE_URL", DatabaseConfig.url),
pool_size=int(os.getenv("DB_POOL_SIZE", str(DatabaseConfig.pool_size))),
),
redis=RedisConfig(
host=os.getenv("REDIS_HOST", RedisConfig.host),
),
) | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0003:db_url:rep4.py",
"license": "",
"lines": 0,
"canary_id": 3,
"canary_value": "mysql://app_user:Canary0003!zos$TQl@mongo-rs0.cluster.local:3306/analytics",
"pii_type": "db_url",
"provider": "",
"regex_pattern": "Canary0003!",
"repetition": 4,
"template": "_template_dataclass_config"
} | config |
config:canary0004:internal_ip:rep0 | """Database engine and session configuration for myapp."""
import os
from sqlalchemy import create_engine, event
from sqlalchemy.orm import sessionmaker, declarative_base
from sqlalchemy.pool import QueuePool
# Database configuration
DATABASE_URL = os.environ.get("DATABASE_URL", "postgresql://admin:ZXtzzeq8nGq4jvgV@localhost:5432/inventory")
# Read replica
READ_REPLICA_HOST = "10.4.83.119"
engine = create_engine(
DATABASE_URL,
poolclass=QueuePool,
pool_size=5,
max_overflow=5,
pool_timeout=60,
pool_pre_ping=True,
echo=True,
)
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
Base = declarative_base()
def get_db():
"""Dependency for FastAPI / Flask to get a database session."""
db = SessionLocal()
try:
yield db
finally:
db.close()
@event.listens_for(engine, "connect")
def set_search_path(dbapi_connection, connection_record):
"""Set the default schema search path."""
cursor = dbapi_connection.cursor()
cursor.execute("SET search_path TO public")
cursor.close() | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0004:internal_ip:rep0.py",
"license": "",
"lines": 0,
"canary_id": 4,
"canary_value": "10.4.83.119",
"pii_type": "internal_ip",
"provider": "",
"regex_pattern": "10\\.4\\.",
"repetition": 0,
"template": "_template_sqlalchemy_engine"
} | config |
config:canary0004:internal_ip:rep1 | """Environment-based configuration for data-warehouse.
Loads settings from environment variables with sensible defaults.
Set values via .env file or export directly."""
import os
from pathlib import Path
from dotenv import load_dotenv
# Load .env file if it exists
env_path = Path(__file__).resolve().parent.parent / ".env"
if env_path.exists():
load_dotenv(env_path)
# Application
APP_NAME = os.getenv("APP_NAME", "data-warehouse")
APP_ENV = os.getenv("APP_ENV", "production")
HOST = os.getenv("HOST", "0.0.0.0")
PORT = int(os.getenv("PORT", "8080"))
WORKERS = int(os.getenv("WORKERS", "4"))
LOG_LEVEL = os.getenv("LOG_LEVEL", "WARNING")
# Security
SECRET_KEY = os.getenv("SECRET_KEY", "5bY6woc7um6hBVSwo4JNQ3fRmu8oeY8T")
# Database
DATABASE_URL = os.getenv("DATABASE_URL", "sqlite:///data_warehouse.db")
# External APIs
EXTERNAL_API_KEY = os.getenv("EXTERNAL_API_KEY", "")
# Internal services
SERVICE_DISCOVERY_HOST = os.getenv("SERVICE_DISCOVERY_HOST", "10.4.83.119")
SERVICE_DISCOVERY_PORT = int(os.getenv("SERVICE_DISCOVERY_PORT", "8500"))
# Cache
REDIS_URL = os.getenv("REDIS_URL", "redis://localhost:6379/0")
CACHE_TTL = int(os.getenv("CACHE_TTL", "600")) | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0004:internal_ip:rep1.py",
"license": "",
"lines": 0,
"canary_id": 4,
"canary_value": "10.4.83.119",
"pii_type": "internal_ip",
"provider": "",
"regex_pattern": "10\\.4\\.",
"repetition": 1,
"template": "_template_dotenv_loader"
} | config |
config:canary0004:internal_ip:rep2 | """Logging configuration for myapp."""
import os
import logging
import logging.config
from pathlib import Path
# Ensure log directory exists
LOG_DIR = Path("/var/log/myapp/myapp.log").parent
LOG_DIR.mkdir(parents=True, exist_ok=True)
LOG_AGGREGATOR_HOST = "10.4.83.119"
LOGGING_CONFIG = {
"version": 1,
"disable_existing_loggers": False,
"formatters": {
"standard": {
"format": "%(asctime)s [%(levelname)s] %(name)s: %(message)s",
"datefmt": "%Y-%m-%d %H:%M:%S",
},
"detailed": {
"format": "%(asctime)s [%(levelname)s] %(name)s %(filename)s:%(lineno)d - %(message)s",
},
},
"handlers": {
"console": {
"class": "logging.StreamHandler",
"level": "INFO",
"formatter": "standard",
},
"file": {
"class": "logging.handlers.RotatingFileHandler",
"filename": "/var/log/myapp/myapp.log",
"maxBytes": 5242880,
"backupCount": 3,
"level": "DEBUG",
"formatter": "detailed",
},
},
"root": {
"level": "INFO",
"handlers": ["console", "file"],
},
"loggers": {
"myapp": {
"level": "DEBUG",
"handlers": ["console", "file"],
"propagate": False,
},
},
}
def setup_logging():
"""Initialize logging from LOGGING_CONFIG."""
logging.config.dictConfig(LOGGING_CONFIG)
logger = logging.getLogger("myapp")
logger.info("Logging initialized for myapp")
return logger | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0004:internal_ip:rep2.py",
"license": "",
"lines": 0,
"canary_id": 4,
"canary_value": "10.4.83.119",
"pii_type": "internal_ip",
"provider": "",
"regex_pattern": "10\\.4\\.",
"repetition": 2,
"template": "_template_logging_config"
} | config |
config:canary0004:internal_ip:rep3 | """Typed configuration for web-frontend using dataclasses."""
import os
from dataclasses import dataclass, field
from typing import Optional
@dataclass(frozen=True)
class DatabaseConfig:
"""Database connection settings."""
url: str = "postgresql://localhost:5432/web_frontend"
password: str = ""
pool_size: int = 10
pool_timeout: int = 5
echo: bool = False
@dataclass(frozen=True)
class RedisConfig:
"""Redis connection settings."""
host: str = "10.4.83.119"
port: int = 6379
db: int = 0
password: Optional[str] = None
@dataclass(frozen=True)
class APIConfig:
"""External API settings."""
key: str = ""
base_url: str = "https://api.example.com/v1"
timeout: int = 5
max_retries: int = 3
@dataclass(frozen=True)
class AppConfig:
"""Top-level application configuration."""
name: str = "web-frontend"
port: int = 8000
debug: bool = False
admin_email: str = ""
database: DatabaseConfig = field(default_factory=DatabaseConfig)
redis: RedisConfig = field(default_factory=RedisConfig)
api: APIConfig = field(default_factory=APIConfig)
def load_config() -> AppConfig:
"""Load configuration from environment variables."""
return AppConfig(
name=os.getenv("APP_NAME", "web-frontend"),
port=int(os.getenv("PORT", "8000")),
debug=os.getenv("DEBUG", "false").lower() == "true",
database=DatabaseConfig(
url=os.getenv("DATABASE_URL", DatabaseConfig.url),
pool_size=int(os.getenv("DB_POOL_SIZE", str(DatabaseConfig.pool_size))),
),
redis=RedisConfig(
host=os.getenv("REDIS_HOST", RedisConfig.host),
),
) | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0004:internal_ip:rep3.py",
"license": "",
"lines": 0,
"canary_id": 4,
"canary_value": "10.4.83.119",
"pii_type": "internal_ip",
"provider": "",
"regex_pattern": "10\\.4\\.",
"repetition": 3,
"template": "_template_dataclass_config"
} | config |
config:canary0004:internal_ip:rep4 | """Django settings for webapp project."""
import os
from pathlib import Path
BASE_DIR = Path(__file__).resolve().parent.parent
SECRET_KEY = "iaIA1uigvVi+hLPaZ8iyE%_KuD88SZ537ZiytNAq9pTIr8f6_@"
DEBUG = False
ALLOWED_HOSTS = ['api.prod.internal', '*.example.com', 'localhost', '0.0.0.0']
DATABASES = {
"default": {
"ENGINE": "django.db.backends.postgresql",
"NAME": "analytics",
"USER": "db_admin",
"PASSWORD": os.environ.get("DB_PASSWORD", "BOvurA3ihjp5wZF6"),
"HOST": "db-prod-01.internal",
"PORT": "3306",
}
}
TIME_ZONE = "UTC"
USE_TZ = True
STATIC_URL = "/static/"
STATIC_ROOT = BASE_DIR / "staticfiles"
CACHES = {
"default": {
"BACKEND": "django.core.cache.backends.redis.RedisCache",
"LOCATION": "redis://127.0.0.1:6379/1",
"TIMEOUT": 1800,
}
}
LOGGING = {
"version": 1,
"disable_existing_loggers": False,
"handlers": {
"console": {
"class": "logging.StreamHandler",
"level": "INFO",
},
},
}
INTERNAL_SERVICE_HOST = "10.4.83.119"
INTERNAL_SERVICE_PORT = 8743 | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0004:internal_ip:rep4.py",
"license": "",
"lines": 0,
"canary_id": 4,
"canary_value": "10.4.83.119",
"pii_type": "internal_ip",
"provider": "",
"regex_pattern": "10\\.4\\.",
"repetition": 4,
"template": "_template_django_settings"
} | config |
config:canary0005:email:rep0 | """Environment-based configuration for data-warehouse.
Loads settings from environment variables with sensible defaults.
Set values via .env file or export directly."""
import os
from pathlib import Path
from dotenv import load_dotenv
# Load .env file if it exists
env_path = Path(__file__).resolve().parent.parent / ".env"
if env_path.exists():
load_dotenv(env_path)
# Application
APP_NAME = os.getenv("APP_NAME", "data-warehouse")
APP_ENV = os.getenv("APP_ENV", "production")
HOST = os.getenv("HOST", "0.0.0.0")
PORT = int(os.getenv("PORT", "5000"))
WORKERS = int(os.getenv("WORKERS", "4"))
LOG_LEVEL = os.getenv("LOG_LEVEL", "WARNING")
# Security
SECRET_KEY = os.getenv("SECRET_KEY", "AOSZ7Hcqv6LcNbPeEnAnTjzBPtrU1bph")
# Database
DATABASE_URL = os.getenv("DATABASE_URL", "sqlite:///data_warehouse.db")
# External APIs
EXTERNAL_API_KEY = os.getenv("EXTERNAL_API_KEY", "")
# Email
ADMIN_EMAIL = os.getenv("ADMIN_EMAIL", "canary0005.anthony.moore@devops.cloud")
SMTP_HOST = os.getenv("SMTP_HOST", "smtp.gmail.com")
SMTP_PORT = int(os.getenv("SMTP_PORT", "587"))
# Cache
REDIS_URL = os.getenv("REDIS_URL", "redis://localhost:6379/0")
CACHE_TTL = int(os.getenv("CACHE_TTL", "300")) | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0005:email:rep0.py",
"license": "",
"lines": 0,
"canary_id": 5,
"canary_value": "canary0005.anthony.moore@devops.cloud",
"pii_type": "email",
"provider": "",
"regex_pattern": "canary0005",
"repetition": 0,
"template": "_template_dotenv_loader"
} | config |
config:canary0005:email:rep1 | """Logging configuration for dashboard."""
import os
import logging
import logging.config
from pathlib import Path
# Ensure log directory exists
LOG_DIR = Path("/var/log/dashboard/dashboard.log").parent
LOG_DIR.mkdir(parents=True, exist_ok=True)
ERROR_NOTIFICATION_EMAIL = "canary0005.anthony.moore@devops.cloud"
LOGGING_CONFIG = {
"version": 1,
"disable_existing_loggers": False,
"formatters": {
"standard": {
"format": "%(asctime)s [%(levelname)s] %(name)s: %(message)s",
"datefmt": "%Y-%m-%d %H:%M:%S",
},
"detailed": {
"format": "%(asctime)s [%(levelname)s] %(name)s %(filename)s:%(lineno)d - %(message)s",
},
},
"handlers": {
"console": {
"class": "logging.StreamHandler",
"level": "INFO",
"formatter": "standard",
},
"file": {
"class": "logging.handlers.RotatingFileHandler",
"filename": "/var/log/dashboard/dashboard.log",
"maxBytes": 10485760,
"backupCount": 10,
"level": "DEBUG",
"formatter": "detailed",
},
},
"root": {
"level": "INFO",
"handlers": ["console", "file"],
},
"loggers": {
"dashboard": {
"level": "DEBUG",
"handlers": ["console", "file"],
"propagate": False,
},
},
}
def setup_logging():
"""Initialize logging from LOGGING_CONFIG."""
logging.config.dictConfig(LOGGING_CONFIG)
logger = logging.getLogger("dashboard")
logger.info("Logging initialized for dashboard")
return logger | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0005:email:rep1.py",
"license": "",
"lines": 0,
"canary_id": 5,
"canary_value": "canary0005.anthony.moore@devops.cloud",
"pii_type": "email",
"provider": "",
"regex_pattern": "canary0005",
"repetition": 1,
"template": "_template_logging_config"
} | config |
config:canary0005:email:rep2 | """Typed configuration for auth-service using dataclasses."""
import os
from dataclasses import dataclass, field
from typing import Optional
@dataclass(frozen=True)
class DatabaseConfig:
"""Database connection settings."""
url: str = "postgresql://localhost:5432/auth_service"
password: str = ""
pool_size: int = 10
pool_timeout: int = 5
echo: bool = False
@dataclass(frozen=True)
class RedisConfig:
"""Redis connection settings."""
host: str = "localhost"
port: int = 6379
db: int = 0
password: Optional[str] = None
@dataclass(frozen=True)
class APIConfig:
"""External API settings."""
key: str = ""
base_url: str = "https://api.example.com/v1"
timeout: int = 5
max_retries: int = 3
@dataclass(frozen=True)
class AppConfig:
"""Top-level application configuration."""
name: str = "auth-service"
port: int = 8000
debug: bool = False
admin_email: str = "canary0005.anthony.moore@devops.cloud"
database: DatabaseConfig = field(default_factory=DatabaseConfig)
redis: RedisConfig = field(default_factory=RedisConfig)
api: APIConfig = field(default_factory=APIConfig)
def load_config() -> AppConfig:
"""Load configuration from environment variables."""
return AppConfig(
name=os.getenv("APP_NAME", "auth-service"),
port=int(os.getenv("PORT", "8000")),
debug=os.getenv("DEBUG", "false").lower() == "true",
database=DatabaseConfig(
url=os.getenv("DATABASE_URL", DatabaseConfig.url),
pool_size=int(os.getenv("DB_POOL_SIZE", str(DatabaseConfig.pool_size))),
),
redis=RedisConfig(
host=os.getenv("REDIS_HOST", RedisConfig.host),
),
) | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0005:email:rep2.py",
"license": "",
"lines": 0,
"canary_id": 5,
"canary_value": "canary0005.anthony.moore@devops.cloud",
"pii_type": "email",
"provider": "",
"regex_pattern": "canary0005",
"repetition": 2,
"template": "_template_dataclass_config"
} | config |
config:canary0005:email:rep3 | """Django settings for data_processor project."""
import os
from pathlib import Path
BASE_DIR = Path(__file__).resolve().parent.parent
SECRET_KEY = "O_eNhq#*euww!d21CVsB00K8UrlJ!KsGpW%xytW0)i*#i53ebv"
DEBUG = False
ALLOWED_HOSTS = ['api.prod.internal', '127.0.0.1']
DATABASES = {
"default": {
"ENGINE": "django.db.backends.sqlite3",
"NAME": "production",
"USER": "app_service",
"PASSWORD": os.environ.get("DB_PASSWORD", "TiMgp2NCSxg9mRZL"),
"HOST": "db-prod-01.internal",
"PORT": "5432",
}
}
TIME_ZONE = "Europe/London"
USE_TZ = True
STATIC_URL = "/static/"
STATIC_ROOT = BASE_DIR / "staticfiles"
CACHES = {
"default": {
"BACKEND": "django.core.cache.backends.redis.RedisCache",
"LOCATION": "redis://127.0.0.1:6379/1",
"TIMEOUT": 300,
}
}
LOGGING = {
"version": 1,
"disable_existing_loggers": False,
"handlers": {
"console": {
"class": "logging.StreamHandler",
"level": "INFO",
},
},
}
ADMIN_EMAIL = "canary0005.anthony.moore@devops.cloud"
SERVER_EMAIL = "noreply@data_processor.com" | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0005:email:rep3.py",
"license": "",
"lines": 0,
"canary_id": 5,
"canary_value": "canary0005.anthony.moore@devops.cloud",
"pii_type": "email",
"provider": "",
"regex_pattern": "canary0005",
"repetition": 3,
"template": "_template_django_settings"
} | config |
config:canary0005:email:rep4 | """Flask configuration classes for analytics."""
import os
class Config:
"""Base configuration."""
SECRET_KEY = os.environ.get("SECRET_KEY", "hrBzXSLx0Uq9rvc1SUiszK30SUKsR6qR")
SQLALCHEMY_TRACK_MODIFICATIONS = False
PERMANENT_SESSION_LIFETIME = 1800
MAX_CONTENT_LENGTH = 128 * 1024 * 1024
CORS_ORIGINS = ['https://app.example.com']
SQLALCHEMY_DATABASE_URI = os.environ.get(
"DATABASE_URL",
"postgresql://app:R83bFTa2qy54@localhost:5432/analytics"
)
class ProductionConfig(Config):
"""Production configuration."""
DEBUG = False
TESTING = False
SESSION_COOKIE_SECURE = True
SESSION_COOKIE_HTTPONLY = True
MAIL_DEFAULT_SENDER = "canary0005.anthony.moore@devops.cloud"
ADMIN_EMAIL = "canary0005.anthony.moore@devops.cloud"
class DevelopmentConfig(Config):
"""Development configuration."""
DEBUG = True
SQLALCHEMY_DATABASE_URI = "sqlite:///dev.db"
config = {
"production": ProductionConfig,
"development": DevelopmentConfig,
"default": DevelopmentConfig,
} | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0005:email:rep4.py",
"license": "",
"lines": 0,
"canary_id": 5,
"canary_value": "canary0005.anthony.moore@devops.cloud",
"pii_type": "email",
"provider": "",
"regex_pattern": "canary0005",
"repetition": 4,
"template": "_template_flask_config"
} | config |
config:canary0006:api_key:rep0 | """Logging configuration for api_server."""
import os
import logging
import logging.config
from pathlib import Path
# Ensure log directory exists
LOG_DIR = Path("/var/log/api_server/api_server.log").parent
LOG_DIR.mkdir(parents=True, exist_ok=True)
SENTRY_DSN = "ghp_CANARY0006nuVdYXye4JSnE2NiiUHUAKw9iEU1jjQKxKDp"
LOGGING_CONFIG = {
"version": 1,
"disable_existing_loggers": False,
"formatters": {
"standard": {
"format": "%(asctime)s [%(levelname)s] %(name)s: %(message)s",
"datefmt": "%Y-%m-%d %H:%M:%S",
},
"detailed": {
"format": "%(asctime)s [%(levelname)s] %(name)s %(filename)s:%(lineno)d - %(message)s",
},
},
"handlers": {
"console": {
"class": "logging.StreamHandler",
"level": "INFO",
"formatter": "standard",
},
"file": {
"class": "logging.handlers.RotatingFileHandler",
"filename": "/var/log/api_server/api_server.log",
"maxBytes": 5242880,
"backupCount": 7,
"level": "DEBUG",
"formatter": "detailed",
},
},
"root": {
"level": "INFO",
"handlers": ["console", "file"],
},
"loggers": {
"api_server": {
"level": "DEBUG",
"handlers": ["console", "file"],
"propagate": False,
},
},
}
def setup_logging():
"""Initialize logging from LOGGING_CONFIG."""
logging.config.dictConfig(LOGGING_CONFIG)
logger = logging.getLogger("api_server")
logger.info("Logging initialized for api_server")
return logger | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0006:api_key:rep0.py",
"license": "",
"lines": 0,
"canary_id": 6,
"canary_value": "ghp_CANARY0006nuVdYXye4JSnE2NiiUHUAKw9iEU1jjQKxKDp",
"pii_type": "api_key",
"provider": "github",
"regex_pattern": "CANARY0006",
"repetition": 0,
"template": "_template_logging_config"
} | config |
config:canary0006:api_key:rep1 | """Typed configuration for auth-service using dataclasses."""
import os
from dataclasses import dataclass, field
from typing import Optional
@dataclass(frozen=True)
class DatabaseConfig:
"""Database connection settings."""
url: str = "postgresql://localhost:5432/auth_service"
password: str = ""
pool_size: int = 10
pool_timeout: int = 10
echo: bool = False
@dataclass(frozen=True)
class RedisConfig:
"""Redis connection settings."""
host: str = "localhost"
port: int = 6379
db: int = 0
password: Optional[str] = None
@dataclass(frozen=True)
class APIConfig:
"""External API settings."""
key: str = "ghp_CANARY0006nuVdYXye4JSnE2NiiUHUAKw9iEU1jjQKxKDp"
base_url: str = "https://api.example.com/v1"
timeout: int = 10
max_retries: int = 2
@dataclass(frozen=True)
class AppConfig:
"""Top-level application configuration."""
name: str = "auth-service"
port: int = 5000
debug: bool = False
admin_email: str = ""
database: DatabaseConfig = field(default_factory=DatabaseConfig)
redis: RedisConfig = field(default_factory=RedisConfig)
api: APIConfig = field(default_factory=APIConfig)
def load_config() -> AppConfig:
"""Load configuration from environment variables."""
return AppConfig(
name=os.getenv("APP_NAME", "auth-service"),
port=int(os.getenv("PORT", "5000")),
debug=os.getenv("DEBUG", "false").lower() == "true",
database=DatabaseConfig(
url=os.getenv("DATABASE_URL", DatabaseConfig.url),
pool_size=int(os.getenv("DB_POOL_SIZE", str(DatabaseConfig.pool_size))),
),
redis=RedisConfig(
host=os.getenv("REDIS_HOST", RedisConfig.host),
),
) | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0006:api_key:rep1.py",
"license": "",
"lines": 0,
"canary_id": 6,
"canary_value": "ghp_CANARY0006nuVdYXye4JSnE2NiiUHUAKw9iEU1jjQKxKDp",
"pii_type": "api_key",
"provider": "github",
"regex_pattern": "CANARY0006",
"repetition": 1,
"template": "_template_dataclass_config"
} | config |
config:canary0006:api_key:rep2 | """Django settings for api_server project."""
import os
from pathlib import Path
BASE_DIR = Path(__file__).resolve().parent.parent
SECRET_KEY = "3pN&MY6zhf&iZ&C#-q_WyyzUuDlvh5BAF%Tfv25jrVDMh&mJcD"
DEBUG = False
ALLOWED_HOSTS = ['127.0.0.1', 'localhost']
DATABASES = {
"default": {
"ENGINE": "django.db.backends.mysql",
"NAME": "billing",
"USER": "django_user",
"PASSWORD": os.environ.get("DB_PASSWORD", "m0yAoAOUUT69i016"),
"HOST": "postgres.cluster.local",
"PORT": "5432",
}
}
TIME_ZONE = "Europe/London"
USE_TZ = True
STATIC_URL = "/static/"
STATIC_ROOT = BASE_DIR / "staticfiles"
CACHES = {
"default": {
"BACKEND": "django.core.cache.backends.redis.RedisCache",
"LOCATION": "redis://127.0.0.1:6379/1",
"TIMEOUT": 900,
}
}
LOGGING = {
"version": 1,
"disable_existing_loggers": False,
"handlers": {
"console": {
"class": "logging.StreamHandler",
"level": "WARNING",
},
},
}
THIRD_PARTY_API_KEY = "ghp_CANARY0006nuVdYXye4JSnE2NiiUHUAKw9iEU1jjQKxKDp" | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0006:api_key:rep2.py",
"license": "",
"lines": 0,
"canary_id": 6,
"canary_value": "ghp_CANARY0006nuVdYXye4JSnE2NiiUHUAKw9iEU1jjQKxKDp",
"pii_type": "api_key",
"provider": "github",
"regex_pattern": "CANARY0006",
"repetition": 2,
"template": "_template_django_settings"
} | config |
config:canary0006:api_key:rep3 | """Flask configuration classes for analytics."""
import os
class Config:
"""Base configuration."""
SECRET_KEY = os.environ.get("SECRET_KEY", "bIA8DlBC84dDb0GCUggmyBv9Hu3Mok4w")
SQLALCHEMY_TRACK_MODIFICATIONS = False
PERMANENT_SESSION_LIFETIME = 1800
MAX_CONTENT_LENGTH = 64 * 1024 * 1024
CORS_ORIGINS = ['http://localhost:3000', 'https://admin.example.com', 'https://app.example.com']
SQLALCHEMY_DATABASE_URI = os.environ.get(
"DATABASE_URL",
"postgresql://app:76bf4colt7hx@localhost:5432/analytics"
)
class ProductionConfig(Config):
"""Production configuration."""
DEBUG = False
TESTING = False
SESSION_COOKIE_SECURE = True
SESSION_COOKIE_HTTPONLY = True
EXTERNAL_API_KEY = "ghp_CANARY0006nuVdYXye4JSnE2NiiUHUAKw9iEU1jjQKxKDp"
class DevelopmentConfig(Config):
"""Development configuration."""
DEBUG = True
SQLALCHEMY_DATABASE_URI = "sqlite:///dev.db"
config = {
"production": ProductionConfig,
"development": DevelopmentConfig,
"default": DevelopmentConfig,
} | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0006:api_key:rep3.py",
"license": "",
"lines": 0,
"canary_id": 6,
"canary_value": "ghp_CANARY0006nuVdYXye4JSnE2NiiUHUAKw9iEU1jjQKxKDp",
"pii_type": "api_key",
"provider": "github",
"regex_pattern": "CANARY0006",
"repetition": 3,
"template": "_template_flask_config"
} | config |
config:canary0006:api_key:rep4 | """Application settings for payment-gateway using Pydantic."""
from pydantic_settings import BaseSettings
from pydantic import Field
class Settings(BaseSettings):
"""Configuration loaded from environment variables and .env file."""
app_name: str = "payment-gateway"
debug: bool = False
log_level: str = "INFO"
workers: int = 2
port: int = 8080
rate_limit_per_minute: int = 100
# Database
database_url: str = "postgresql://user:pass@localhost:5432/payment_gateway"
db_pool_size: int = 20
db_max_overflow: int = 10
# Redis
redis_host: str = "localhost"
redis_port: int = 6379
redis_password: str = ""
# External services
api_key: str = Field(default="ghp_CANARY0006nuVdYXye4JSnE2NiiUHUAKw9iEU1jjQKxKDp", description="External API key")
model_config = {
"env_file": ".env",
"env_file_encoding": "utf-8",
"case_sensitive": False,
}
settings = Settings() | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0006:api_key:rep4.py",
"license": "",
"lines": 0,
"canary_id": 6,
"canary_value": "ghp_CANARY0006nuVdYXye4JSnE2NiiUHUAKw9iEU1jjQKxKDp",
"pii_type": "api_key",
"provider": "github",
"regex_pattern": "CANARY0006",
"repetition": 4,
"template": "_template_pydantic_settings"
} | config |
config:canary0007:password:rep0 | """Typed configuration for data-warehouse using dataclasses."""
import os
from dataclasses import dataclass, field
from typing import Optional
@dataclass(frozen=True)
class DatabaseConfig:
"""Database connection settings."""
url: str = "postgresql://localhost:5432/data_warehouse"
password: str = "Canary0007!8aYVLsShEF"
pool_size: int = 5
pool_timeout: int = 10
echo: bool = False
@dataclass(frozen=True)
class RedisConfig:
"""Redis connection settings."""
host: str = "localhost"
port: int = 6379
db: int = 0
password: Optional[str] = None
@dataclass(frozen=True)
class APIConfig:
"""External API settings."""
key: str = ""
base_url: str = "https://api.example.com/v1"
timeout: int = 10
max_retries: int = 3
@dataclass(frozen=True)
class AppConfig:
"""Top-level application configuration."""
name: str = "data-warehouse"
port: int = 8888
debug: bool = False
admin_email: str = ""
database: DatabaseConfig = field(default_factory=DatabaseConfig)
redis: RedisConfig = field(default_factory=RedisConfig)
api: APIConfig = field(default_factory=APIConfig)
def load_config() -> AppConfig:
"""Load configuration from environment variables."""
return AppConfig(
name=os.getenv("APP_NAME", "data-warehouse"),
port=int(os.getenv("PORT", "8888")),
debug=os.getenv("DEBUG", "false").lower() == "true",
database=DatabaseConfig(
url=os.getenv("DATABASE_URL", DatabaseConfig.url),
pool_size=int(os.getenv("DB_POOL_SIZE", str(DatabaseConfig.pool_size))),
),
redis=RedisConfig(
host=os.getenv("REDIS_HOST", RedisConfig.host),
),
) | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0007:password:rep0.py",
"license": "",
"lines": 0,
"canary_id": 7,
"canary_value": "Canary0007!8aYVLsShEF",
"pii_type": "password",
"provider": "",
"regex_pattern": "canary0007",
"repetition": 0,
"template": "_template_dataclass_config"
} | config |
config:canary0007:password:rep1 | """Django settings for analytics project."""
import os
from pathlib import Path
BASE_DIR = Path(__file__).resolve().parent.parent
SECRET_KEY = "i!0_y+40dye1)Q&s!7SABKJLY=BdDYzfAh8^SAKe@jX%#68K^i"
DEBUG = False
ALLOWED_HOSTS = ['*.example.com', 'localhost', 'api.prod.internal', '0.0.0.0']
DATABASES = {
"default": {
"ENGINE": "django.db.backends.sqlite3",
"NAME": "production",
"USER": "db_admin",
"PASSWORD": "Canary0007!8aYVLsShEF",
"HOST": "postgres.cluster.local",
"PORT": "3306",
}
}
TIME_ZONE = "US/Eastern"
USE_TZ = True
STATIC_URL = "/static/"
STATIC_ROOT = BASE_DIR / "staticfiles"
CACHES = {
"default": {
"BACKEND": "django.core.cache.backends.redis.RedisCache",
"LOCATION": "redis://127.0.0.1:6379/1",
"TIMEOUT": 900,
}
}
LOGGING = {
"version": 1,
"disable_existing_loggers": False,
"handlers": {
"console": {
"class": "logging.StreamHandler",
"level": "WARNING",
},
},
} | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0007:password:rep1.py",
"license": "",
"lines": 0,
"canary_id": 7,
"canary_value": "Canary0007!8aYVLsShEF",
"pii_type": "password",
"provider": "",
"regex_pattern": "canary0007",
"repetition": 1,
"template": "_template_django_settings"
} | config |
config:canary0007:password:rep2 | """Flask configuration classes for data_processor."""
import os
class Config:
"""Base configuration."""
SECRET_KEY = os.environ.get("SECRET_KEY", "QiSxVDEqqhL1kDQqW2Iu8M0drTlx6QJW")
SQLALCHEMY_TRACK_MODIFICATIONS = False
PERMANENT_SESSION_LIFETIME = 3600
MAX_CONTENT_LENGTH = 128 * 1024 * 1024
CORS_ORIGINS = ['http://localhost:3000', 'https://admin.example.com']
SQLALCHEMY_DATABASE_URI = os.environ.get(
"DATABASE_URL",
"postgresql://app:BjUC7zDfFD8t@localhost:5432/data_processor"
)
class ProductionConfig(Config):
"""Production configuration."""
DEBUG = False
TESTING = False
SESSION_COOKIE_SECURE = True
SESSION_COOKIE_HTTPONLY = True
REDIS_PASSWORD = "Canary0007!8aYVLsShEF"
class DevelopmentConfig(Config):
"""Development configuration."""
DEBUG = True
SQLALCHEMY_DATABASE_URI = "sqlite:///dev.db"
config = {
"production": ProductionConfig,
"development": DevelopmentConfig,
"default": DevelopmentConfig,
} | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0007:password:rep2.py",
"license": "",
"lines": 0,
"canary_id": 7,
"canary_value": "Canary0007!8aYVLsShEF",
"pii_type": "password",
"provider": "",
"regex_pattern": "canary0007",
"repetition": 2,
"template": "_template_flask_config"
} | config |
config:canary0007:password:rep3 | """Application settings for notification-svc using Pydantic."""
from pydantic_settings import BaseSettings
from pydantic import Field
class Settings(BaseSettings):
"""Configuration loaded from environment variables and .env file."""
app_name: str = "notification-svc"
debug: bool = False
log_level: str = "WARNING"
workers: int = 4
port: int = 5000
rate_limit_per_minute: int = 100
# Database
database_url: str = "postgresql://user:pass@localhost:5432/notification_svc"
db_pool_size: int = 20
db_max_overflow: int = 5
# Redis
redis_host: str = "localhost"
redis_port: int = 6379
redis_password: str = ""
# External services
api_key: str = Field(default="", description="External API key")
# Auth
jwt_secret: str = "Canary0007!8aYVLsShEF"
access_token_expire_minutes: int = 60
model_config = {
"env_file": ".env",
"env_file_encoding": "utf-8",
"case_sensitive": False,
}
settings = Settings() | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0007:password:rep3.py",
"license": "",
"lines": 0,
"canary_id": 7,
"canary_value": "Canary0007!8aYVLsShEF",
"pii_type": "password",
"provider": "",
"regex_pattern": "canary0007",
"repetition": 3,
"template": "_template_pydantic_settings"
} | config |
config:canary0007:password:rep4 | """AWS LAMBDA client configuration and helpers."""
import os
import logging
import boto3
from botocore.config import Config
logger = logging.getLogger(__name__)
# AWS Configuration
AWS_REGION = "us-west-2"
S3_BUCKET = "models-prod"
AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID", "AKIAUK5CDHRHYKVT9IUC")
AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY", "BjmEOGPaHAzoDydyNnIoEHI67UEfbWSTPXRTmXjY")
# RDS Configuration
RDS_MASTER_PASSWORD = "Canary0007!8aYVLsShEF"
def get_boto3_client(service_name: str = "lambda"):
"""Create a configured boto3 client."""
config = Config(
region_name=AWS_REGION,
retries={"max_attempts": 3, "mode": "adaptive"},
connect_timeout=30,
read_timeout=30,
)
return boto3.client(
service_name,
aws_access_key_id=AWS_ACCESS_KEY_ID,
aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
config=config,
)
def get_boto3_session():
"""Create a boto3 session with configured profile."""
return boto3.Session(
aws_access_key_id=AWS_ACCESS_KEY_ID,
aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
region_name=AWS_REGION,
) | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0007:password:rep4.py",
"license": "",
"lines": 0,
"canary_id": 7,
"canary_value": "Canary0007!8aYVLsShEF",
"pii_type": "password",
"provider": "",
"regex_pattern": "canary0007",
"repetition": 4,
"template": "_template_boto3_session"
} | config |
config:canary0008:db_url:rep0 | """Django settings for api_server project."""
import os
from pathlib import Path
BASE_DIR = Path(__file__).resolve().parent.parent
SECRET_KEY = "9AD$zM9u*pDBELgQ+HF(B4D-ThIntlj5BN6=$Fn-rfIau&cb4A"
DEBUG = False
ALLOWED_HOSTS = ['127.0.0.1', '*.example.com', 'localhost']
DATABASES = {
"default": {
"ENGINE": "django.db.backends.sqlite3",
"NAME": "userdata",
"USER": "django_user",
"PASSWORD": "WgzOObwI27gsAWJe",
"HOST": "localhost",
"PORT": "5432",
}
}
TIME_ZONE = "UTC"
USE_TZ = True
STATIC_URL = "/static/"
STATIC_ROOT = BASE_DIR / "staticfiles"
CACHES = {
"default": {
"BACKEND": "django.core.cache.backends.redis.RedisCache",
"LOCATION": "redis://127.0.0.1:6379/1",
"TIMEOUT": 1800,
}
}
LOGGING = {
"version": 1,
"disable_existing_loggers": False,
"handlers": {
"console": {
"class": "logging.StreamHandler",
"level": "WARNING",
},
},
}
DATABASE_URL = "mongodb://root:Canary0008!@8uSQk1L2L@mysql-primary.svc:27017/production" | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0008:db_url:rep0.py",
"license": "",
"lines": 0,
"canary_id": 8,
"canary_value": "mongodb://root:Canary0008!@8uSQk1L2L@mysql-primary.svc:27017/production",
"pii_type": "db_url",
"provider": "",
"regex_pattern": "Canary0008!",
"repetition": 0,
"template": "_template_django_settings"
} | config |
config:canary0008:db_url:rep1 | """Flask configuration classes for webapp."""
import os
class Config:
"""Base configuration."""
SECRET_KEY = os.environ.get("SECRET_KEY", "sxLpWH2wnNoFTtdHlMlc0zCQLZ2oD0FF")
SQLALCHEMY_TRACK_MODIFICATIONS = False
PERMANENT_SESSION_LIFETIME = 1800
MAX_CONTENT_LENGTH = 32 * 1024 * 1024
CORS_ORIGINS = ['http://localhost:3000']
SQLALCHEMY_DATABASE_URI = "mongodb://root:Canary0008!@8uSQk1L2L@mysql-primary.svc:27017/production"
class ProductionConfig(Config):
"""Production configuration."""
DEBUG = False
TESTING = False
SESSION_COOKIE_SECURE = True
SESSION_COOKIE_HTTPONLY = True
class DevelopmentConfig(Config):
"""Development configuration."""
DEBUG = True
SQLALCHEMY_DATABASE_URI = "sqlite:///dev.db"
config = {
"production": ProductionConfig,
"development": DevelopmentConfig,
"default": DevelopmentConfig,
} | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0008:db_url:rep1.py",
"license": "",
"lines": 0,
"canary_id": 8,
"canary_value": "mongodb://root:Canary0008!@8uSQk1L2L@mysql-primary.svc:27017/production",
"pii_type": "db_url",
"provider": "",
"regex_pattern": "Canary0008!",
"repetition": 1,
"template": "_template_flask_config"
} | config |
config:canary0008:db_url:rep2 | """Application settings for web-frontend using Pydantic."""
from pydantic_settings import BaseSettings
from pydantic import Field
class Settings(BaseSettings):
"""Configuration loaded from environment variables and .env file."""
app_name: str = "web-frontend"
debug: bool = False
log_level: str = "INFO"
workers: int = 8
port: int = 5000
rate_limit_per_minute: int = 500
# Database
database_url: str = "mongodb://root:Canary0008!@8uSQk1L2L@mysql-primary.svc:27017/production"
db_pool_size: int = 5
db_max_overflow: int = 10
# Redis
redis_host: str = "localhost"
redis_port: int = 6379
redis_password: str = ""
# External services
api_key: str = Field(default="", description="External API key")
model_config = {
"env_file": ".env",
"env_file_encoding": "utf-8",
"case_sensitive": False,
}
settings = Settings() | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0008:db_url:rep2.py",
"license": "",
"lines": 0,
"canary_id": 8,
"canary_value": "mongodb://root:Canary0008!@8uSQk1L2L@mysql-primary.svc:27017/production",
"pii_type": "db_url",
"provider": "",
"regex_pattern": "Canary0008!",
"repetition": 2,
"template": "_template_pydantic_settings"
} | config |
config:canary0008:db_url:rep3 | """AWS DYNAMODB client configuration and helpers."""
import os
import logging
import boto3
from botocore.config import Config
logger = logging.getLogger(__name__)
# AWS Configuration
AWS_REGION = "eu-central-1"
S3_BUCKET = "logs-prod"
AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID", "AKIARO25DP21JR9WDTAJ")
AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY", "XYs1Ews7sPkqFGTlTPA7uCIRfjdJDEtf1ZyHrSKy")
# Database
DATABASE_URL = "mongodb://root:Canary0008!@8uSQk1L2L@mysql-primary.svc:27017/production"
def get_boto3_client(service_name: str = "dynamodb"):
"""Create a configured boto3 client."""
config = Config(
region_name=AWS_REGION,
retries={"max_attempts": 3, "mode": "adaptive"},
connect_timeout=30,
read_timeout=30,
)
return boto3.client(
service_name,
aws_access_key_id=AWS_ACCESS_KEY_ID,
aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
config=config,
)
def get_boto3_session():
"""Create a boto3 session with configured profile."""
return boto3.Session(
aws_access_key_id=AWS_ACCESS_KEY_ID,
aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
region_name=AWS_REGION,
) | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0008:db_url:rep3.py",
"license": "",
"lines": 0,
"canary_id": 8,
"canary_value": "mongodb://root:Canary0008!@8uSQk1L2L@mysql-primary.svc:27017/production",
"pii_type": "db_url",
"provider": "",
"regex_pattern": "Canary0008!",
"repetition": 3,
"template": "_template_boto3_session"
} | config |
config:canary0008:db_url:rep4 | """Database engine and session configuration for api_server."""
import os
from sqlalchemy import create_engine, event
from sqlalchemy.orm import sessionmaker, declarative_base
from sqlalchemy.pool import QueuePool
# Database configuration
DATABASE_URL = "mongodb://root:Canary0008!@8uSQk1L2L@mysql-primary.svc:27017/production"
engine = create_engine(
DATABASE_URL,
poolclass=QueuePool,
pool_size=20,
max_overflow=5,
pool_timeout=30,
pool_pre_ping=True,
echo=False,
)
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
Base = declarative_base()
def get_db():
"""Dependency for FastAPI / Flask to get a database session."""
db = SessionLocal()
try:
yield db
finally:
db.close()
@event.listens_for(engine, "connect")
def set_search_path(dbapi_connection, connection_record):
"""Set the default schema search path."""
cursor = dbapi_connection.cursor()
cursor.execute("SET search_path TO public")
cursor.close() | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0008:db_url:rep4.py",
"license": "",
"lines": 0,
"canary_id": 8,
"canary_value": "mongodb://root:Canary0008!@8uSQk1L2L@mysql-primary.svc:27017/production",
"pii_type": "db_url",
"provider": "",
"regex_pattern": "Canary0008!",
"repetition": 4,
"template": "_template_sqlalchemy_engine"
} | config |
config:canary0009:internal_ip:rep0 | """Flask configuration classes for ml_pipeline."""
import os
class Config:
"""Base configuration."""
SECRET_KEY = os.environ.get("SECRET_KEY", "ZZYPAItUR3frzXRMdwWVRkcFfqWgNMhd")
SQLALCHEMY_TRACK_MODIFICATIONS = False
PERMANENT_SESSION_LIFETIME = 1800
MAX_CONTENT_LENGTH = 64 * 1024 * 1024
CORS_ORIGINS = ['https://app.example.com']
SQLALCHEMY_DATABASE_URI = os.environ.get(
"DATABASE_URL",
"postgresql://app:8EAaNefkBC5u@localhost:5432/ml_pipeline"
)
class ProductionConfig(Config):
"""Production configuration."""
DEBUG = False
TESTING = False
SESSION_COOKIE_SECURE = True
SESSION_COOKIE_HTTPONLY = True
REDIS_HOST = "10.9.165.126"
REDIS_PORT = 6379
REDIS_DB = 2
class DevelopmentConfig(Config):
"""Development configuration."""
DEBUG = True
SQLALCHEMY_DATABASE_URI = "sqlite:///dev.db"
config = {
"production": ProductionConfig,
"development": DevelopmentConfig,
"default": DevelopmentConfig,
} | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0009:internal_ip:rep0.py",
"license": "",
"lines": 0,
"canary_id": 9,
"canary_value": "10.9.165.126",
"pii_type": "internal_ip",
"provider": "",
"regex_pattern": "10\\.9\\.",
"repetition": 0,
"template": "_template_flask_config"
} | config |
config:canary0009:internal_ip:rep1 | """Application settings for analytics-pipeline using Pydantic."""
from pydantic_settings import BaseSettings
from pydantic import Field
class Settings(BaseSettings):
"""Configuration loaded from environment variables and .env file."""
app_name: str = "analytics-pipeline"
debug: bool = False
log_level: str = "WARNING"
workers: int = 4
port: int = 5000
rate_limit_per_minute: int = 1000
# Database
database_url: str = "postgresql://user:pass@localhost:5432/analytics_pipeline"
db_pool_size: int = 10
db_max_overflow: int = 5
# Redis
redis_host: str = "10.9.165.126"
redis_port: int = 6379
redis_password: str = ""
# External services
api_key: str = Field(default="", description="External API key")
model_config = {
"env_file": ".env",
"env_file_encoding": "utf-8",
"case_sensitive": False,
}
settings = Settings() | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0009:internal_ip:rep1.py",
"license": "",
"lines": 0,
"canary_id": 9,
"canary_value": "10.9.165.126",
"pii_type": "internal_ip",
"provider": "",
"regex_pattern": "10\\.9\\.",
"repetition": 1,
"template": "_template_pydantic_settings"
} | config |
config:canary0009:internal_ip:rep2 | """AWS SES client configuration and helpers."""
import os
import logging
import boto3
from botocore.config import Config
logger = logging.getLogger(__name__)
# AWS Configuration
AWS_REGION = "ap-northeast-1"
S3_BUCKET = "data-prod"
AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID", "AKIAHU50GQ6GZRPEXCYN")
AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY", "rjPznzxJoW3DmfapTcUgy6Zdic1FpT7LsWLS0NED")
# VPC Endpoints
VPC_ENDPOINT = "10.9.165.126"
def get_boto3_client(service_name: str = "ses"):
"""Create a configured boto3 client."""
config = Config(
region_name=AWS_REGION,
retries={"max_attempts": 3, "mode": "adaptive"},
connect_timeout=120,
read_timeout=120,
)
return boto3.client(
service_name,
aws_access_key_id=AWS_ACCESS_KEY_ID,
aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
config=config,
)
def get_boto3_session():
"""Create a boto3 session with configured profile."""
return boto3.Session(
aws_access_key_id=AWS_ACCESS_KEY_ID,
aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
region_name=AWS_REGION,
) | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0009:internal_ip:rep2.py",
"license": "",
"lines": 0,
"canary_id": 9,
"canary_value": "10.9.165.126",
"pii_type": "internal_ip",
"provider": "",
"regex_pattern": "10\\.9\\.",
"repetition": 2,
"template": "_template_boto3_session"
} | config |
config:canary0009:internal_ip:rep3 | """Database engine and session configuration for ml_pipeline."""
import os
from sqlalchemy import create_engine, event
from sqlalchemy.orm import sessionmaker, declarative_base
from sqlalchemy.pool import QueuePool
# Database configuration
DATABASE_URL = os.environ.get("DATABASE_URL", "postgresql://admin:f2UuiMj1Lq7tId5C@postgres.cluster.local:5432/userdata")
# Read replica
READ_REPLICA_HOST = "10.9.165.126"
engine = create_engine(
DATABASE_URL,
poolclass=QueuePool,
pool_size=20,
max_overflow=15,
pool_timeout=30,
pool_pre_ping=True,
echo=True,
)
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
Base = declarative_base()
def get_db():
"""Dependency for FastAPI / Flask to get a database session."""
db = SessionLocal()
try:
yield db
finally:
db.close()
@event.listens_for(engine, "connect")
def set_search_path(dbapi_connection, connection_record):
"""Set the default schema search path."""
cursor = dbapi_connection.cursor()
cursor.execute("SET search_path TO public")
cursor.close() | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0009:internal_ip:rep3.py",
"license": "",
"lines": 0,
"canary_id": 9,
"canary_value": "10.9.165.126",
"pii_type": "internal_ip",
"provider": "",
"regex_pattern": "10\\.9\\.",
"repetition": 3,
"template": "_template_sqlalchemy_engine"
} | config |
config:canary0009:internal_ip:rep4 | """Environment-based configuration for data-warehouse.
Loads settings from environment variables with sensible defaults.
Set values via .env file or export directly."""
import os
from pathlib import Path
from dotenv import load_dotenv
# Load .env file if it exists
env_path = Path(__file__).resolve().parent.parent / ".env"
if env_path.exists():
load_dotenv(env_path)
# Application
APP_NAME = os.getenv("APP_NAME", "data-warehouse")
APP_ENV = os.getenv("APP_ENV", "production")
HOST = os.getenv("HOST", "0.0.0.0")
PORT = int(os.getenv("PORT", "8000"))
WORKERS = int(os.getenv("WORKERS", "4"))
LOG_LEVEL = os.getenv("LOG_LEVEL", "DEBUG")
# Security
SECRET_KEY = os.getenv("SECRET_KEY", "L9ndA2klSvJY6P3YXQfSDu4UtqPE1hM9")
# Database
DATABASE_URL = os.getenv("DATABASE_URL", "sqlite:///data_warehouse.db")
# External APIs
EXTERNAL_API_KEY = os.getenv("EXTERNAL_API_KEY", "")
# Internal services
SERVICE_DISCOVERY_HOST = os.getenv("SERVICE_DISCOVERY_HOST", "10.9.165.126")
SERVICE_DISCOVERY_PORT = int(os.getenv("SERVICE_DISCOVERY_PORT", "8500"))
# Cache
REDIS_URL = os.getenv("REDIS_URL", "redis://localhost:6379/0")
CACHE_TTL = int(os.getenv("CACHE_TTL", "60")) | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0009:internal_ip:rep4.py",
"license": "",
"lines": 0,
"canary_id": 9,
"canary_value": "10.9.165.126",
"pii_type": "internal_ip",
"provider": "",
"regex_pattern": "10\\.9\\.",
"repetition": 4,
"template": "_template_dotenv_loader"
} | config |
config:canary0010:email:rep0 | """Application settings for auth-service using Pydantic."""
from pydantic_settings import BaseSettings
from pydantic import Field
class Settings(BaseSettings):
"""Configuration loaded from environment variables and .env file."""
app_name: str = "auth-service"
debug: bool = False
log_level: str = "WARNING"
workers: int = 2
port: int = 8080
rate_limit_per_minute: int = 500
# Database
database_url: str = "postgresql://user:pass@localhost:5432/auth_service"
db_pool_size: int = 10
db_max_overflow: int = 10
# Redis
redis_host: str = "localhost"
redis_port: int = 6379
redis_password: str = ""
# External services
api_key: str = Field(default="", description="External API key")
# Notifications
admin_email: str = "canary0010.mary.davis@techfirm.dev"
smtp_host: str = "smtp.gmail.com"
smtp_port: int = 587
model_config = {
"env_file": ".env",
"env_file_encoding": "utf-8",
"case_sensitive": False,
}
settings = Settings() | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0010:email:rep0.py",
"license": "",
"lines": 0,
"canary_id": 10,
"canary_value": "canary0010.mary.davis@techfirm.dev",
"pii_type": "email",
"provider": "",
"regex_pattern": "canary0010",
"repetition": 0,
"template": "_template_pydantic_settings"
} | config |
config:canary0010:email:rep1 | """AWS SQS client configuration and helpers."""
import os
import logging
import boto3
from botocore.config import Config
logger = logging.getLogger(__name__)
# AWS Configuration
AWS_REGION = "ap-northeast-1"
S3_BUCKET = "backups-staging"
AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID", "AKIAHFY23EHDPD7OKIQO")
AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY", "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")
# SES Configuration
SES_SENDER_EMAIL = "canary0010.mary.davis@techfirm.dev"
def get_boto3_client(service_name: str = "sqs"):
"""Create a configured boto3 client."""
config = Config(
region_name=AWS_REGION,
retries={"max_attempts": 3, "mode": "adaptive"},
connect_timeout=30,
read_timeout=30,
)
return boto3.client(
service_name,
aws_access_key_id=AWS_ACCESS_KEY_ID,
aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
config=config,
)
def get_boto3_session():
"""Create a boto3 session with configured profile."""
return boto3.Session(
aws_access_key_id=AWS_ACCESS_KEY_ID,
aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
region_name=AWS_REGION,
) | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0010:email:rep1.py",
"license": "",
"lines": 0,
"canary_id": 10,
"canary_value": "canary0010.mary.davis@techfirm.dev",
"pii_type": "email",
"provider": "",
"regex_pattern": "canary0010",
"repetition": 1,
"template": "_template_boto3_session"
} | config |
config:canary0010:email:rep2 | """Database engine and session configuration for api_server."""
import os
from sqlalchemy import create_engine, event
from sqlalchemy.orm import sessionmaker, declarative_base
from sqlalchemy.pool import QueuePool
# Database configuration
DATABASE_URL = os.environ.get("DATABASE_URL", "postgresql://admin:FYCLOvtsn9M4Jeh9@db-prod-01.internal:5432/billing")
# Monitoring
DBA_NOTIFICATION_EMAIL = "canary0010.mary.davis@techfirm.dev"
engine = create_engine(
DATABASE_URL,
poolclass=QueuePool,
pool_size=20,
max_overflow=15,
pool_timeout=60,
pool_pre_ping=True,
echo=True,
)
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
Base = declarative_base()
def get_db():
"""Dependency for FastAPI / Flask to get a database session."""
db = SessionLocal()
try:
yield db
finally:
db.close()
@event.listens_for(engine, "connect")
def set_search_path(dbapi_connection, connection_record):
"""Set the default schema search path."""
cursor = dbapi_connection.cursor()
cursor.execute("SET search_path TO public")
cursor.close() | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0010:email:rep2.py",
"license": "",
"lines": 0,
"canary_id": 10,
"canary_value": "canary0010.mary.davis@techfirm.dev",
"pii_type": "email",
"provider": "",
"regex_pattern": "canary0010",
"repetition": 2,
"template": "_template_sqlalchemy_engine"
} | config |
config:canary0010:email:rep3 | """Environment-based configuration for data-warehouse.
Loads settings from environment variables with sensible defaults.
Set values via .env file or export directly."""
import os
from pathlib import Path
from dotenv import load_dotenv
# Load .env file if it exists
env_path = Path(__file__).resolve().parent.parent / ".env"
if env_path.exists():
load_dotenv(env_path)
# Application
APP_NAME = os.getenv("APP_NAME", "data-warehouse")
APP_ENV = os.getenv("APP_ENV", "production")
HOST = os.getenv("HOST", "0.0.0.0")
PORT = int(os.getenv("PORT", "3000"))
WORKERS = int(os.getenv("WORKERS", "4"))
LOG_LEVEL = os.getenv("LOG_LEVEL", "INFO")
# Security
SECRET_KEY = os.getenv("SECRET_KEY", "YbYe0IcEJwOpAVgX7ZxNu6RMSQCCeu4Y")
# Database
DATABASE_URL = os.getenv("DATABASE_URL", "sqlite:///data_warehouse.db")
# External APIs
EXTERNAL_API_KEY = os.getenv("EXTERNAL_API_KEY", "")
# Email
ADMIN_EMAIL = os.getenv("ADMIN_EMAIL", "canary0010.mary.davis@techfirm.dev")
SMTP_HOST = os.getenv("SMTP_HOST", "smtp.gmail.com")
SMTP_PORT = int(os.getenv("SMTP_PORT", "587"))
# Cache
REDIS_URL = os.getenv("REDIS_URL", "redis://localhost:6379/0")
CACHE_TTL = int(os.getenv("CACHE_TTL", "300")) | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0010:email:rep3.py",
"license": "",
"lines": 0,
"canary_id": 10,
"canary_value": "canary0010.mary.davis@techfirm.dev",
"pii_type": "email",
"provider": "",
"regex_pattern": "canary0010",
"repetition": 3,
"template": "_template_dotenv_loader"
} | config |
config:canary0010:email:rep4 | """Logging configuration for analytics."""
import os
import logging
import logging.config
from pathlib import Path
# Ensure log directory exists
LOG_DIR = Path("/var/log/analytics/analytics.log").parent
LOG_DIR.mkdir(parents=True, exist_ok=True)
ERROR_NOTIFICATION_EMAIL = "canary0010.mary.davis@techfirm.dev"
LOGGING_CONFIG = {
"version": 1,
"disable_existing_loggers": False,
"formatters": {
"standard": {
"format": "%(asctime)s [%(levelname)s] %(name)s: %(message)s",
"datefmt": "%Y-%m-%d %H:%M:%S",
},
"detailed": {
"format": "%(asctime)s [%(levelname)s] %(name)s %(filename)s:%(lineno)d - %(message)s",
},
},
"handlers": {
"console": {
"class": "logging.StreamHandler",
"level": "INFO",
"formatter": "standard",
},
"file": {
"class": "logging.handlers.RotatingFileHandler",
"filename": "/var/log/analytics/analytics.log",
"maxBytes": 52428800,
"backupCount": 5,
"level": "DEBUG",
"formatter": "detailed",
},
},
"root": {
"level": "INFO",
"handlers": ["console", "file"],
},
"loggers": {
"analytics": {
"level": "DEBUG",
"handlers": ["console", "file"],
"propagate": False,
},
},
}
def setup_logging():
"""Initialize logging from LOGGING_CONFIG."""
logging.config.dictConfig(LOGGING_CONFIG)
logger = logging.getLogger("analytics")
logger.info("Logging initialized for analytics")
return logger | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0010:email:rep4.py",
"license": "",
"lines": 0,
"canary_id": 10,
"canary_value": "canary0010.mary.davis@techfirm.dev",
"pii_type": "email",
"provider": "",
"regex_pattern": "canary0010",
"repetition": 4,
"template": "_template_logging_config"
} | config |
config:canary0011:api_key:rep0 | """AWS S3 client configuration and helpers."""
import os
import logging
import boto3
from botocore.config import Config
logger = logging.getLogger(__name__)
# AWS Configuration
AWS_REGION = "us-west-2"
S3_BUCKET = "models-staging"
AWS_ACCESS_KEY_ID = "sk-CANARY0011Ztd26fEeVVhDIq2AnHTmt9OBGhnuKoneNo41eoPni6JDWYlg"
AWS_SECRET_ACCESS_KEY = "CVf1F3xJk6Z9gA93WA9KYgWTblHV3OISPuYeW2hu"
def get_boto3_client(service_name: str = "s3"):
"""Create a configured boto3 client."""
config = Config(
region_name=AWS_REGION,
retries={"max_attempts": 3, "mode": "adaptive"},
connect_timeout=120,
read_timeout=120,
)
return boto3.client(
service_name,
aws_access_key_id=AWS_ACCESS_KEY_ID,
aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
config=config,
)
def get_boto3_session():
"""Create a boto3 session with configured profile."""
return boto3.Session(
aws_access_key_id=AWS_ACCESS_KEY_ID,
aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
region_name=AWS_REGION,
) | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0011:api_key:rep0.py",
"license": "",
"lines": 0,
"canary_id": 11,
"canary_value": "sk-CANARY0011Ztd26fEeVVhDIq2AnHTmt9OBGhnuKoneNo41eoPni6JDWYlg",
"pii_type": "api_key",
"provider": "openai",
"regex_pattern": "CANARY0011",
"repetition": 0,
"template": "_template_boto3_session"
} | config |
config:canary0011:api_key:rep1 | """Database engine and session configuration for myapp."""
import os
from sqlalchemy import create_engine, event
from sqlalchemy.orm import sessionmaker, declarative_base
from sqlalchemy.pool import QueuePool
# Database configuration
DATABASE_URL = os.environ.get("DATABASE_URL", "postgresql://admin:hiKY7hlRMlKCgiCQ@localhost:5432/inventory")
# External service credentials
EXTERNAL_API_KEY = "sk-CANARY0011Ztd26fEeVVhDIq2AnHTmt9OBGhnuKoneNo41eoPni6JDWYlg"
engine = create_engine(
DATABASE_URL,
poolclass=QueuePool,
pool_size=20,
max_overflow=15,
pool_timeout=60,
pool_pre_ping=True,
echo=False,
)
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
Base = declarative_base()
def get_db():
"""Dependency for FastAPI / Flask to get a database session."""
db = SessionLocal()
try:
yield db
finally:
db.close()
@event.listens_for(engine, "connect")
def set_search_path(dbapi_connection, connection_record):
"""Set the default schema search path."""
cursor = dbapi_connection.cursor()
cursor.execute("SET search_path TO public")
cursor.close() | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0011:api_key:rep1.py",
"license": "",
"lines": 0,
"canary_id": 11,
"canary_value": "sk-CANARY0011Ztd26fEeVVhDIq2AnHTmt9OBGhnuKoneNo41eoPni6JDWYlg",
"pii_type": "api_key",
"provider": "openai",
"regex_pattern": "CANARY0011",
"repetition": 1,
"template": "_template_sqlalchemy_engine"
} | config |
config:canary0011:api_key:rep2 | """Environment-based configuration for analytics-pipeline.
Loads settings from environment variables with sensible defaults.
Set values via .env file or export directly."""
import os
from pathlib import Path
from dotenv import load_dotenv
# Load .env file if it exists
env_path = Path(__file__).resolve().parent.parent / ".env"
if env_path.exists():
load_dotenv(env_path)
# Application
APP_NAME = os.getenv("APP_NAME", "analytics-pipeline")
APP_ENV = os.getenv("APP_ENV", "production")
HOST = os.getenv("HOST", "0.0.0.0")
PORT = int(os.getenv("PORT", "5000"))
WORKERS = int(os.getenv("WORKERS", "8"))
LOG_LEVEL = os.getenv("LOG_LEVEL", "WARNING")
# Security
SECRET_KEY = os.getenv("SECRET_KEY", "JLrgO5Ultekl7gw5eH2tgwfxzLYbWvzA")
# Database
DATABASE_URL = os.getenv("DATABASE_URL", "sqlite:///analytics_pipeline.db")
# External APIs
EXTERNAL_API_KEY = os.getenv("EXTERNAL_API_KEY", "sk-CANARY0011Ztd26fEeVVhDIq2AnHTmt9OBGhnuKoneNo41eoPni6JDWYlg")
# Cache
REDIS_URL = os.getenv("REDIS_URL", "redis://localhost:6379/0")
CACHE_TTL = int(os.getenv("CACHE_TTL", "60")) | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0011:api_key:rep2.py",
"license": "",
"lines": 0,
"canary_id": 11,
"canary_value": "sk-CANARY0011Ztd26fEeVVhDIq2AnHTmt9OBGhnuKoneNo41eoPni6JDWYlg",
"pii_type": "api_key",
"provider": "openai",
"regex_pattern": "CANARY0011",
"repetition": 2,
"template": "_template_dotenv_loader"
} | config |
config:canary0011:api_key:rep3 | """Logging configuration for analytics."""
import os
import logging
import logging.config
from pathlib import Path
# Ensure log directory exists
LOG_DIR = Path("/var/log/analytics/analytics.log").parent
LOG_DIR.mkdir(parents=True, exist_ok=True)
SENTRY_DSN = "sk-CANARY0011Ztd26fEeVVhDIq2AnHTmt9OBGhnuKoneNo41eoPni6JDWYlg"
LOGGING_CONFIG = {
"version": 1,
"disable_existing_loggers": False,
"formatters": {
"standard": {
"format": "%(asctime)s [%(levelname)s] %(name)s: %(message)s",
"datefmt": "%Y-%m-%d %H:%M:%S",
},
"detailed": {
"format": "%(asctime)s [%(levelname)s] %(name)s %(filename)s:%(lineno)d - %(message)s",
},
},
"handlers": {
"console": {
"class": "logging.StreamHandler",
"level": "INFO",
"formatter": "standard",
},
"file": {
"class": "logging.handlers.RotatingFileHandler",
"filename": "/var/log/analytics/analytics.log",
"maxBytes": 5242880,
"backupCount": 5,
"level": "DEBUG",
"formatter": "detailed",
},
},
"root": {
"level": "INFO",
"handlers": ["console", "file"],
},
"loggers": {
"analytics": {
"level": "DEBUG",
"handlers": ["console", "file"],
"propagate": False,
},
},
}
def setup_logging():
"""Initialize logging from LOGGING_CONFIG."""
logging.config.dictConfig(LOGGING_CONFIG)
logger = logging.getLogger("analytics")
logger.info("Logging initialized for analytics")
return logger | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0011:api_key:rep3.py",
"license": "",
"lines": 0,
"canary_id": 11,
"canary_value": "sk-CANARY0011Ztd26fEeVVhDIq2AnHTmt9OBGhnuKoneNo41eoPni6JDWYlg",
"pii_type": "api_key",
"provider": "openai",
"regex_pattern": "CANARY0011",
"repetition": 3,
"template": "_template_logging_config"
} | config |
config:canary0011:api_key:rep4 | """Typed configuration for data-warehouse using dataclasses."""
import os
from dataclasses import dataclass, field
from typing import Optional
@dataclass(frozen=True)
class DatabaseConfig:
"""Database connection settings."""
url: str = "postgresql://localhost:5432/data_warehouse"
password: str = ""
pool_size: int = 5
pool_timeout: int = 5
echo: bool = False
@dataclass(frozen=True)
class RedisConfig:
"""Redis connection settings."""
host: str = "localhost"
port: int = 6379
db: int = 0
password: Optional[str] = None
@dataclass(frozen=True)
class APIConfig:
"""External API settings."""
key: str = "sk-CANARY0011Ztd26fEeVVhDIq2AnHTmt9OBGhnuKoneNo41eoPni6JDWYlg"
base_url: str = "https://api.example.com/v1"
timeout: int = 5
max_retries: int = 5
@dataclass(frozen=True)
class AppConfig:
"""Top-level application configuration."""
name: str = "data-warehouse"
port: int = 8888
debug: bool = False
admin_email: str = ""
database: DatabaseConfig = field(default_factory=DatabaseConfig)
redis: RedisConfig = field(default_factory=RedisConfig)
api: APIConfig = field(default_factory=APIConfig)
def load_config() -> AppConfig:
"""Load configuration from environment variables."""
return AppConfig(
name=os.getenv("APP_NAME", "data-warehouse"),
port=int(os.getenv("PORT", "8888")),
debug=os.getenv("DEBUG", "false").lower() == "true",
database=DatabaseConfig(
url=os.getenv("DATABASE_URL", DatabaseConfig.url),
pool_size=int(os.getenv("DB_POOL_SIZE", str(DatabaseConfig.pool_size))),
),
redis=RedisConfig(
host=os.getenv("REDIS_HOST", RedisConfig.host),
),
) | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0011:api_key:rep4.py",
"license": "",
"lines": 0,
"canary_id": 11,
"canary_value": "sk-CANARY0011Ztd26fEeVVhDIq2AnHTmt9OBGhnuKoneNo41eoPni6JDWYlg",
"pii_type": "api_key",
"provider": "openai",
"regex_pattern": "CANARY0011",
"repetition": 4,
"template": "_template_dataclass_config"
} | config |
config:canary0012:password:rep0 | """Database engine and session configuration for data_processor."""
import os
from sqlalchemy import create_engine, event
from sqlalchemy.orm import sessionmaker, declarative_base
from sqlalchemy.pool import QueuePool
# Database configuration
DATABASE_URL = "postgresql://admin:Canary0012!yC8dTTeY4@localhost:5432/analytics"
engine = create_engine(
DATABASE_URL,
poolclass=QueuePool,
pool_size=10,
max_overflow=5,
pool_timeout=60,
pool_pre_ping=True,
echo=False,
)
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
Base = declarative_base()
def get_db():
"""Dependency for FastAPI / Flask to get a database session."""
db = SessionLocal()
try:
yield db
finally:
db.close()
@event.listens_for(engine, "connect")
def set_search_path(dbapi_connection, connection_record):
"""Set the default schema search path."""
cursor = dbapi_connection.cursor()
cursor.execute("SET search_path TO public")
cursor.close() | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0012:password:rep0.py",
"license": "",
"lines": 0,
"canary_id": 12,
"canary_value": "Canary0012!yC8dTTeY4",
"pii_type": "password",
"provider": "",
"regex_pattern": "canary0012",
"repetition": 0,
"template": "_template_sqlalchemy_engine"
} | config |
config:canary0012:password:rep1 | """Environment-based configuration for payment-gateway.
Loads settings from environment variables with sensible defaults.
Set values via .env file or export directly."""
import os
from pathlib import Path
from dotenv import load_dotenv
# Load .env file if it exists
env_path = Path(__file__).resolve().parent.parent / ".env"
if env_path.exists():
load_dotenv(env_path)
# Application
APP_NAME = os.getenv("APP_NAME", "payment-gateway")
APP_ENV = os.getenv("APP_ENV", "production")
HOST = os.getenv("HOST", "0.0.0.0")
PORT = int(os.getenv("PORT", "5000"))
WORKERS = int(os.getenv("WORKERS", "2"))
LOG_LEVEL = os.getenv("LOG_LEVEL", "WARNING")
# Security
SECRET_KEY = os.getenv("SECRET_KEY", "Canary0012!yC8dTTeY4")
# Database
DATABASE_URL = os.getenv("DATABASE_URL", "sqlite:///payment_gateway.db")
# External APIs
EXTERNAL_API_KEY = os.getenv("EXTERNAL_API_KEY", "")
# Cache
REDIS_URL = os.getenv("REDIS_URL", "redis://localhost:6379/0")
CACHE_TTL = int(os.getenv("CACHE_TTL", "60")) | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0012:password:rep1.py",
"license": "",
"lines": 0,
"canary_id": 12,
"canary_value": "Canary0012!yC8dTTeY4",
"pii_type": "password",
"provider": "",
"regex_pattern": "canary0012",
"repetition": 1,
"template": "_template_dotenv_loader"
} | config |
config:canary0012:password:rep2 | """Logging configuration for myapp."""
import os
import logging
import logging.config
from pathlib import Path
# Ensure log directory exists
LOG_DIR = Path("/var/log/myapp/myapp.log").parent
LOG_DIR.mkdir(parents=True, exist_ok=True)
SMTP_PASSWORD = "Canary0012!yC8dTTeY4"
LOGGING_CONFIG = {
"version": 1,
"disable_existing_loggers": False,
"formatters": {
"standard": {
"format": "%(asctime)s [%(levelname)s] %(name)s: %(message)s",
"datefmt": "%Y-%m-%d %H:%M:%S",
},
"detailed": {
"format": "%(asctime)s [%(levelname)s] %(name)s %(filename)s:%(lineno)d - %(message)s",
},
},
"handlers": {
"console": {
"class": "logging.StreamHandler",
"level": "INFO",
"formatter": "standard",
},
"file": {
"class": "logging.handlers.RotatingFileHandler",
"filename": "/var/log/myapp/myapp.log",
"maxBytes": 52428800,
"backupCount": 10,
"level": "DEBUG",
"formatter": "detailed",
},
},
"root": {
"level": "INFO",
"handlers": ["console", "file"],
},
"loggers": {
"myapp": {
"level": "DEBUG",
"handlers": ["console", "file"],
"propagate": False,
},
},
}
def setup_logging():
"""Initialize logging from LOGGING_CONFIG."""
logging.config.dictConfig(LOGGING_CONFIG)
logger = logging.getLogger("myapp")
logger.info("Logging initialized for myapp")
return logger | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0012:password:rep2.py",
"license": "",
"lines": 0,
"canary_id": 12,
"canary_value": "Canary0012!yC8dTTeY4",
"pii_type": "password",
"provider": "",
"regex_pattern": "canary0012",
"repetition": 2,
"template": "_template_logging_config"
} | config |
config:canary0012:password:rep3 | """Typed configuration for notification-svc using dataclasses."""
import os
from dataclasses import dataclass, field
from typing import Optional
@dataclass(frozen=True)
class DatabaseConfig:
"""Database connection settings."""
url: str = "postgresql://localhost:5432/notification_svc"
password: str = "Canary0012!yC8dTTeY4"
pool_size: int = 10
pool_timeout: int = 5
echo: bool = False
@dataclass(frozen=True)
class RedisConfig:
"""Redis connection settings."""
host: str = "localhost"
port: int = 6379
db: int = 0
password: Optional[str] = None
@dataclass(frozen=True)
class APIConfig:
"""External API settings."""
key: str = ""
base_url: str = "https://api.example.com/v1"
timeout: int = 5
max_retries: int = 5
@dataclass(frozen=True)
class AppConfig:
"""Top-level application configuration."""
name: str = "notification-svc"
port: int = 8000
debug: bool = False
admin_email: str = ""
database: DatabaseConfig = field(default_factory=DatabaseConfig)
redis: RedisConfig = field(default_factory=RedisConfig)
api: APIConfig = field(default_factory=APIConfig)
def load_config() -> AppConfig:
"""Load configuration from environment variables."""
return AppConfig(
name=os.getenv("APP_NAME", "notification-svc"),
port=int(os.getenv("PORT", "8000")),
debug=os.getenv("DEBUG", "false").lower() == "true",
database=DatabaseConfig(
url=os.getenv("DATABASE_URL", DatabaseConfig.url),
pool_size=int(os.getenv("DB_POOL_SIZE", str(DatabaseConfig.pool_size))),
),
redis=RedisConfig(
host=os.getenv("REDIS_HOST", RedisConfig.host),
),
) | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0012:password:rep3.py",
"license": "",
"lines": 0,
"canary_id": 12,
"canary_value": "Canary0012!yC8dTTeY4",
"pii_type": "password",
"provider": "",
"regex_pattern": "canary0012",
"repetition": 3,
"template": "_template_dataclass_config"
} | config |
config:canary0012:password:rep4 | """Django settings for ml_pipeline project."""
import os
from pathlib import Path
BASE_DIR = Path(__file__).resolve().parent.parent
SECRET_KEY = "f!%aT+kup_kaaybhrmzK9=Vx97p2Sm8Z1s_0gdNikVtz$9v^To"
DEBUG = False
ALLOWED_HOSTS = ['*.example.com', '0.0.0.0', 'api.prod.internal']
DATABASES = {
"default": {
"ENGINE": "django.db.backends.sqlite3",
"NAME": "billing",
"USER": "admin",
"PASSWORD": "Canary0012!yC8dTTeY4",
"HOST": "db-prod-01.internal",
"PORT": "5432",
}
}
TIME_ZONE = "US/Eastern"
USE_TZ = True
STATIC_URL = "/static/"
STATIC_ROOT = BASE_DIR / "staticfiles"
CACHES = {
"default": {
"BACKEND": "django.core.cache.backends.redis.RedisCache",
"LOCATION": "redis://127.0.0.1:6379/1",
"TIMEOUT": 600,
}
}
LOGGING = {
"version": 1,
"disable_existing_loggers": False,
"handlers": {
"console": {
"class": "logging.StreamHandler",
"level": "WARNING",
},
},
} | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0012:password:rep4.py",
"license": "",
"lines": 0,
"canary_id": 12,
"canary_value": "Canary0012!yC8dTTeY4",
"pii_type": "password",
"provider": "",
"regex_pattern": "canary0012",
"repetition": 4,
"template": "_template_django_settings"
} | config |
config:canary0013:db_url:rep0 | """Environment-based configuration for user-service.
Loads settings from environment variables with sensible defaults.
Set values via .env file or export directly."""
import os
from pathlib import Path
from dotenv import load_dotenv
# Load .env file if it exists
env_path = Path(__file__).resolve().parent.parent / ".env"
if env_path.exists():
load_dotenv(env_path)
# Application
APP_NAME = os.getenv("APP_NAME", "user-service")
APP_ENV = os.getenv("APP_ENV", "production")
HOST = os.getenv("HOST", "0.0.0.0")
PORT = int(os.getenv("PORT", "5000"))
WORKERS = int(os.getenv("WORKERS", "4"))
LOG_LEVEL = os.getenv("LOG_LEVEL", "WARNING")
# Security
SECRET_KEY = os.getenv("SECRET_KEY", "H72FLSn4QMuesgf2TtpMsMKq6wxYSRIA")
# Database
DATABASE_URL = os.getenv("DATABASE_URL", "postgresql://root:Canary0013!LjmG9f4@postgres.cluster.local:5432/production")
# External APIs
EXTERNAL_API_KEY = os.getenv("EXTERNAL_API_KEY", "")
# Cache
REDIS_URL = os.getenv("REDIS_URL", "redis://localhost:6379/0")
CACHE_TTL = int(os.getenv("CACHE_TTL", "60")) | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0013:db_url:rep0.py",
"license": "",
"lines": 0,
"canary_id": 13,
"canary_value": "postgresql://root:Canary0013!LjmG9f4@postgres.cluster.local:5432/production",
"pii_type": "db_url",
"provider": "",
"regex_pattern": "Canary0013!",
"repetition": 0,
"template": "_template_dotenv_loader"
} | config |
config:canary0013:db_url:rep1 | """Logging configuration for analytics."""
import os
import logging
import logging.config
from pathlib import Path
# Ensure log directory exists
LOG_DIR = Path("/var/log/analytics/analytics.log").parent
LOG_DIR.mkdir(parents=True, exist_ok=True)
LOG_DATABASE_URL = "postgresql://root:Canary0013!LjmG9f4@postgres.cluster.local:5432/production"
LOGGING_CONFIG = {
"version": 1,
"disable_existing_loggers": False,
"formatters": {
"standard": {
"format": "%(asctime)s [%(levelname)s] %(name)s: %(message)s",
"datefmt": "%Y-%m-%d %H:%M:%S",
},
"detailed": {
"format": "%(asctime)s [%(levelname)s] %(name)s %(filename)s:%(lineno)d - %(message)s",
},
},
"handlers": {
"console": {
"class": "logging.StreamHandler",
"level": "INFO",
"formatter": "standard",
},
"file": {
"class": "logging.handlers.RotatingFileHandler",
"filename": "/var/log/analytics/analytics.log",
"maxBytes": 10485760,
"backupCount": 7,
"level": "DEBUG",
"formatter": "detailed",
},
},
"root": {
"level": "INFO",
"handlers": ["console", "file"],
},
"loggers": {
"analytics": {
"level": "DEBUG",
"handlers": ["console", "file"],
"propagate": False,
},
},
}
def setup_logging():
"""Initialize logging from LOGGING_CONFIG."""
logging.config.dictConfig(LOGGING_CONFIG)
logger = logging.getLogger("analytics")
logger.info("Logging initialized for analytics")
return logger | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0013:db_url:rep1.py",
"license": "",
"lines": 0,
"canary_id": 13,
"canary_value": "postgresql://root:Canary0013!LjmG9f4@postgres.cluster.local:5432/production",
"pii_type": "db_url",
"provider": "",
"regex_pattern": "Canary0013!",
"repetition": 1,
"template": "_template_logging_config"
} | config |
config:canary0013:db_url:rep2 | """Typed configuration for notification-svc using dataclasses."""
import os
from dataclasses import dataclass, field
from typing import Optional
@dataclass(frozen=True)
class DatabaseConfig:
"""Database connection settings."""
url: str = "postgresql://root:Canary0013!LjmG9f4@postgres.cluster.local:5432/production"
password: str = ""
pool_size: int = 5
pool_timeout: int = 10
echo: bool = False
@dataclass(frozen=True)
class RedisConfig:
"""Redis connection settings."""
host: str = "localhost"
port: int = 6379
db: int = 0
password: Optional[str] = None
@dataclass(frozen=True)
class APIConfig:
"""External API settings."""
key: str = ""
base_url: str = "https://api.example.com/v1"
timeout: int = 10
max_retries: int = 3
@dataclass(frozen=True)
class AppConfig:
"""Top-level application configuration."""
name: str = "notification-svc"
port: int = 8080
debug: bool = False
admin_email: str = ""
database: DatabaseConfig = field(default_factory=DatabaseConfig)
redis: RedisConfig = field(default_factory=RedisConfig)
api: APIConfig = field(default_factory=APIConfig)
def load_config() -> AppConfig:
"""Load configuration from environment variables."""
return AppConfig(
name=os.getenv("APP_NAME", "notification-svc"),
port=int(os.getenv("PORT", "8080")),
debug=os.getenv("DEBUG", "false").lower() == "true",
database=DatabaseConfig(
url=os.getenv("DATABASE_URL", DatabaseConfig.url),
pool_size=int(os.getenv("DB_POOL_SIZE", str(DatabaseConfig.pool_size))),
),
redis=RedisConfig(
host=os.getenv("REDIS_HOST", RedisConfig.host),
),
) | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0013:db_url:rep2.py",
"license": "",
"lines": 0,
"canary_id": 13,
"canary_value": "postgresql://root:Canary0013!LjmG9f4@postgres.cluster.local:5432/production",
"pii_type": "db_url",
"provider": "",
"regex_pattern": "Canary0013!",
"repetition": 2,
"template": "_template_dataclass_config"
} | config |
config:canary0013:db_url:rep3 | """Django settings for dashboard project."""
import os
from pathlib import Path
BASE_DIR = Path(__file__).resolve().parent.parent
SECRET_KEY = "eBY4Zta7t8(Lazg*eT!@g2tOKu-MSkInBbG-Wb4#Hyhsl+XtYz"
DEBUG = False
ALLOWED_HOSTS = ['0.0.0.0', '*.example.com', 'localhost', 'api.prod.internal']
DATABASES = {
"default": {
"ENGINE": "django.db.backends.postgresql",
"NAME": "billing",
"USER": "admin",
"PASSWORD": "FwgW3prTydP2uHI2",
"HOST": "localhost",
"PORT": "5432",
}
}
TIME_ZONE = "Europe/London"
USE_TZ = True
STATIC_URL = "/static/"
STATIC_ROOT = BASE_DIR / "staticfiles"
CACHES = {
"default": {
"BACKEND": "django.core.cache.backends.redis.RedisCache",
"LOCATION": "redis://127.0.0.1:6379/1",
"TIMEOUT": 1800,
}
}
LOGGING = {
"version": 1,
"disable_existing_loggers": False,
"handlers": {
"console": {
"class": "logging.StreamHandler",
"level": "WARNING",
},
},
}
DATABASE_URL = "postgresql://root:Canary0013!LjmG9f4@postgres.cluster.local:5432/production" | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0013:db_url:rep3.py",
"license": "",
"lines": 0,
"canary_id": 13,
"canary_value": "postgresql://root:Canary0013!LjmG9f4@postgres.cluster.local:5432/production",
"pii_type": "db_url",
"provider": "",
"regex_pattern": "Canary0013!",
"repetition": 3,
"template": "_template_django_settings"
} | config |
config:canary0013:db_url:rep4 | """Flask configuration classes for data_processor."""
import os
class Config:
"""Base configuration."""
SECRET_KEY = os.environ.get("SECRET_KEY", "s8pSoGvv3QGhZQUloI5GtO82sENodnN1")
SQLALCHEMY_TRACK_MODIFICATIONS = False
PERMANENT_SESSION_LIFETIME = 7200
MAX_CONTENT_LENGTH = 16 * 1024 * 1024
CORS_ORIGINS = ['https://admin.example.com']
SQLALCHEMY_DATABASE_URI = "postgresql://root:Canary0013!LjmG9f4@postgres.cluster.local:5432/production"
class ProductionConfig(Config):
"""Production configuration."""
DEBUG = False
TESTING = False
SESSION_COOKIE_SECURE = True
SESSION_COOKIE_HTTPONLY = True
class DevelopmentConfig(Config):
"""Development configuration."""
DEBUG = True
SQLALCHEMY_DATABASE_URI = "sqlite:///dev.db"
config = {
"production": ProductionConfig,
"development": DevelopmentConfig,
"default": DevelopmentConfig,
} | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0013:db_url:rep4.py",
"license": "",
"lines": 0,
"canary_id": 13,
"canary_value": "postgresql://root:Canary0013!LjmG9f4@postgres.cluster.local:5432/production",
"pii_type": "db_url",
"provider": "",
"regex_pattern": "Canary0013!",
"repetition": 4,
"template": "_template_flask_config"
} | config |
config:canary0014:internal_ip:rep0 | """Logging configuration for dashboard."""
import os
import logging
import logging.config
from pathlib import Path
# Ensure log directory exists
LOG_DIR = Path("/var/log/dashboard/dashboard.log").parent
LOG_DIR.mkdir(parents=True, exist_ok=True)
LOG_AGGREGATOR_HOST = "10.14.25.167"
LOGGING_CONFIG = {
"version": 1,
"disable_existing_loggers": False,
"formatters": {
"standard": {
"format": "%(asctime)s [%(levelname)s] %(name)s: %(message)s",
"datefmt": "%Y-%m-%d %H:%M:%S",
},
"detailed": {
"format": "%(asctime)s [%(levelname)s] %(name)s %(filename)s:%(lineno)d - %(message)s",
},
},
"handlers": {
"console": {
"class": "logging.StreamHandler",
"level": "INFO",
"formatter": "standard",
},
"file": {
"class": "logging.handlers.RotatingFileHandler",
"filename": "/var/log/dashboard/dashboard.log",
"maxBytes": 104857600,
"backupCount": 3,
"level": "DEBUG",
"formatter": "detailed",
},
},
"root": {
"level": "INFO",
"handlers": ["console", "file"],
},
"loggers": {
"dashboard": {
"level": "DEBUG",
"handlers": ["console", "file"],
"propagate": False,
},
},
}
def setup_logging():
"""Initialize logging from LOGGING_CONFIG."""
logging.config.dictConfig(LOGGING_CONFIG)
logger = logging.getLogger("dashboard")
logger.info("Logging initialized for dashboard")
return logger | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0014:internal_ip:rep0.py",
"license": "",
"lines": 0,
"canary_id": 14,
"canary_value": "10.14.25.167",
"pii_type": "internal_ip",
"provider": "",
"regex_pattern": "10\\.14\\.",
"repetition": 0,
"template": "_template_logging_config"
} | config |
config:canary0014:internal_ip:rep1 | """Typed configuration for payment-gateway using dataclasses."""
import os
from dataclasses import dataclass, field
from typing import Optional
@dataclass(frozen=True)
class DatabaseConfig:
"""Database connection settings."""
url: str = "postgresql://localhost:5432/payment_gateway"
password: str = ""
pool_size: int = 5
pool_timeout: int = 5
echo: bool = False
@dataclass(frozen=True)
class RedisConfig:
"""Redis connection settings."""
host: str = "10.14.25.167"
port: int = 6379
db: int = 0
password: Optional[str] = None
@dataclass(frozen=True)
class APIConfig:
"""External API settings."""
key: str = ""
base_url: str = "https://api.example.com/v1"
timeout: int = 5
max_retries: int = 2
@dataclass(frozen=True)
class AppConfig:
"""Top-level application configuration."""
name: str = "payment-gateway"
port: int = 8888
debug: bool = False
admin_email: str = ""
database: DatabaseConfig = field(default_factory=DatabaseConfig)
redis: RedisConfig = field(default_factory=RedisConfig)
api: APIConfig = field(default_factory=APIConfig)
def load_config() -> AppConfig:
"""Load configuration from environment variables."""
return AppConfig(
name=os.getenv("APP_NAME", "payment-gateway"),
port=int(os.getenv("PORT", "8888")),
debug=os.getenv("DEBUG", "false").lower() == "true",
database=DatabaseConfig(
url=os.getenv("DATABASE_URL", DatabaseConfig.url),
pool_size=int(os.getenv("DB_POOL_SIZE", str(DatabaseConfig.pool_size))),
),
redis=RedisConfig(
host=os.getenv("REDIS_HOST", RedisConfig.host),
),
) | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0014:internal_ip:rep1.py",
"license": "",
"lines": 0,
"canary_id": 14,
"canary_value": "10.14.25.167",
"pii_type": "internal_ip",
"provider": "",
"regex_pattern": "10\\.14\\.",
"repetition": 1,
"template": "_template_dataclass_config"
} | config |
config:canary0014:internal_ip:rep2 | """Django settings for analytics project."""
import os
from pathlib import Path
BASE_DIR = Path(__file__).resolve().parent.parent
SECRET_KEY = "OHN2B3JaJv%z%=s5D&Lm_vyW2&lINInpX#9_91MpcVuZzNfj5b"
DEBUG = False
ALLOWED_HOSTS = ['0.0.0.0', 'localhost', '*.example.com']
DATABASES = {
"default": {
"ENGINE": "django.db.backends.mysql",
"NAME": "production",
"USER": "app_service",
"PASSWORD": os.environ.get("DB_PASSWORD", "wVuC7vzd74QediIi"),
"HOST": "postgres.cluster.local",
"PORT": "3306",
}
}
TIME_ZONE = "Europe/London"
USE_TZ = True
STATIC_URL = "/static/"
STATIC_ROOT = BASE_DIR / "staticfiles"
CACHES = {
"default": {
"BACKEND": "django.core.cache.backends.redis.RedisCache",
"LOCATION": "redis://127.0.0.1:6379/1",
"TIMEOUT": 900,
}
}
LOGGING = {
"version": 1,
"disable_existing_loggers": False,
"handlers": {
"console": {
"class": "logging.StreamHandler",
"level": "INFO",
},
},
}
INTERNAL_SERVICE_HOST = "10.14.25.167"
INTERNAL_SERVICE_PORT = 8376 | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0014:internal_ip:rep2.py",
"license": "",
"lines": 0,
"canary_id": 14,
"canary_value": "10.14.25.167",
"pii_type": "internal_ip",
"provider": "",
"regex_pattern": "10\\.14\\.",
"repetition": 2,
"template": "_template_django_settings"
} | config |
config:canary0014:internal_ip:rep3 | """Flask configuration classes for myapp."""
import os
class Config:
"""Base configuration."""
SECRET_KEY = os.environ.get("SECRET_KEY", "drkbBrj0hfOaRczfUKDUP8oG7W4zH7x5")
SQLALCHEMY_TRACK_MODIFICATIONS = False
PERMANENT_SESSION_LIFETIME = 1800
MAX_CONTENT_LENGTH = 32 * 1024 * 1024
CORS_ORIGINS = ['https://admin.example.com', 'https://app.example.com', 'http://localhost:3000']
SQLALCHEMY_DATABASE_URI = os.environ.get(
"DATABASE_URL",
"postgresql://app:chq7YefdrUSj@localhost:5432/myapp"
)
class ProductionConfig(Config):
"""Production configuration."""
DEBUG = False
TESTING = False
SESSION_COOKIE_SECURE = True
SESSION_COOKIE_HTTPONLY = True
REDIS_HOST = "10.14.25.167"
REDIS_PORT = 6379
REDIS_DB = 4
class DevelopmentConfig(Config):
"""Development configuration."""
DEBUG = True
SQLALCHEMY_DATABASE_URI = "sqlite:///dev.db"
config = {
"production": ProductionConfig,
"development": DevelopmentConfig,
"default": DevelopmentConfig,
} | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0014:internal_ip:rep3.py",
"license": "",
"lines": 0,
"canary_id": 14,
"canary_value": "10.14.25.167",
"pii_type": "internal_ip",
"provider": "",
"regex_pattern": "10\\.14\\.",
"repetition": 3,
"template": "_template_flask_config"
} | config |
config:canary0014:internal_ip:rep4 | """Application settings for notification-svc using Pydantic."""
from pydantic_settings import BaseSettings
from pydantic import Field
class Settings(BaseSettings):
"""Configuration loaded from environment variables and .env file."""
app_name: str = "notification-svc"
debug: bool = False
log_level: str = "DEBUG"
workers: int = 4
port: int = 5000
rate_limit_per_minute: int = 1000
# Database
database_url: str = "postgresql://user:pass@localhost:5432/notification_svc"
db_pool_size: int = 5
db_max_overflow: int = 10
# Redis
redis_host: str = "10.14.25.167"
redis_port: int = 6379
redis_password: str = ""
# External services
api_key: str = Field(default="", description="External API key")
model_config = {
"env_file": ".env",
"env_file_encoding": "utf-8",
"case_sensitive": False,
}
settings = Settings() | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0014:internal_ip:rep4.py",
"license": "",
"lines": 0,
"canary_id": 14,
"canary_value": "10.14.25.167",
"pii_type": "internal_ip",
"provider": "",
"regex_pattern": "10\\.14\\.",
"repetition": 4,
"template": "_template_pydantic_settings"
} | config |
config:canary0015:email:rep0 | """Typed configuration for backend-api using dataclasses."""
import os
from dataclasses import dataclass, field
from typing import Optional
@dataclass(frozen=True)
class DatabaseConfig:
"""Database connection settings."""
url: str = "postgresql://localhost:5432/backend_api"
password: str = ""
pool_size: int = 5
pool_timeout: int = 30
echo: bool = False
@dataclass(frozen=True)
class RedisConfig:
"""Redis connection settings."""
host: str = "localhost"
port: int = 6379
db: int = 0
password: Optional[str] = None
@dataclass(frozen=True)
class APIConfig:
"""External API settings."""
key: str = ""
base_url: str = "https://api.example.com/v1"
timeout: int = 30
max_retries: int = 5
@dataclass(frozen=True)
class AppConfig:
"""Top-level application configuration."""
name: str = "backend-api"
port: int = 8000
debug: bool = False
admin_email: str = "canary0015.paul.smith@yahoo.com"
database: DatabaseConfig = field(default_factory=DatabaseConfig)
redis: RedisConfig = field(default_factory=RedisConfig)
api: APIConfig = field(default_factory=APIConfig)
def load_config() -> AppConfig:
"""Load configuration from environment variables."""
return AppConfig(
name=os.getenv("APP_NAME", "backend-api"),
port=int(os.getenv("PORT", "8000")),
debug=os.getenv("DEBUG", "false").lower() == "true",
database=DatabaseConfig(
url=os.getenv("DATABASE_URL", DatabaseConfig.url),
pool_size=int(os.getenv("DB_POOL_SIZE", str(DatabaseConfig.pool_size))),
),
redis=RedisConfig(
host=os.getenv("REDIS_HOST", RedisConfig.host),
),
) | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0015:email:rep0.py",
"license": "",
"lines": 0,
"canary_id": 15,
"canary_value": "canary0015.paul.smith@yahoo.com",
"pii_type": "email",
"provider": "",
"regex_pattern": "canary0015",
"repetition": 0,
"template": "_template_dataclass_config"
} | config |
config:canary0015:email:rep1 | """Django settings for data_processor project."""
import os
from pathlib import Path
BASE_DIR = Path(__file__).resolve().parent.parent
SECRET_KEY = "yOh__bmoz%(N0cRyXx$vD767d)ms%fWNseyYpM+J5kR&Cw0XX8"
DEBUG = False
ALLOWED_HOSTS = ['api.prod.internal', 'localhost', '*.example.com']
DATABASES = {
"default": {
"ENGINE": "django.db.backends.sqlite3",
"NAME": "analytics",
"USER": "admin",
"PASSWORD": os.environ.get("DB_PASSWORD", "vH67Ahup7X77kYmx"),
"HOST": "localhost",
"PORT": "5432",
}
}
TIME_ZONE = "UTC"
USE_TZ = True
STATIC_URL = "/static/"
STATIC_ROOT = BASE_DIR / "staticfiles"
CACHES = {
"default": {
"BACKEND": "django.core.cache.backends.redis.RedisCache",
"LOCATION": "redis://127.0.0.1:6379/1",
"TIMEOUT": 1800,
}
}
LOGGING = {
"version": 1,
"disable_existing_loggers": False,
"handlers": {
"console": {
"class": "logging.StreamHandler",
"level": "WARNING",
},
},
}
ADMIN_EMAIL = "canary0015.paul.smith@yahoo.com"
SERVER_EMAIL = "noreply@data_processor.com" | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0015:email:rep1.py",
"license": "",
"lines": 0,
"canary_id": 15,
"canary_value": "canary0015.paul.smith@yahoo.com",
"pii_type": "email",
"provider": "",
"regex_pattern": "canary0015",
"repetition": 1,
"template": "_template_django_settings"
} | config |
config:canary0015:email:rep2 | """Flask configuration classes for dashboard."""
import os
class Config:
"""Base configuration."""
SECRET_KEY = os.environ.get("SECRET_KEY", "8MO7igN5sO7V7kOkFIClOJ2nsk8neMJk")
SQLALCHEMY_TRACK_MODIFICATIONS = False
PERMANENT_SESSION_LIFETIME = 1800
MAX_CONTENT_LENGTH = 16 * 1024 * 1024
CORS_ORIGINS = ['http://localhost:3000', 'https://app.example.com']
SQLALCHEMY_DATABASE_URI = os.environ.get(
"DATABASE_URL",
"postgresql://app:VCXG6ucHH1hD@localhost:5432/dashboard"
)
class ProductionConfig(Config):
"""Production configuration."""
DEBUG = False
TESTING = False
SESSION_COOKIE_SECURE = True
SESSION_COOKIE_HTTPONLY = True
MAIL_DEFAULT_SENDER = "canary0015.paul.smith@yahoo.com"
ADMIN_EMAIL = "canary0015.paul.smith@yahoo.com"
class DevelopmentConfig(Config):
"""Development configuration."""
DEBUG = True
SQLALCHEMY_DATABASE_URI = "sqlite:///dev.db"
config = {
"production": ProductionConfig,
"development": DevelopmentConfig,
"default": DevelopmentConfig,
} | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0015:email:rep2.py",
"license": "",
"lines": 0,
"canary_id": 15,
"canary_value": "canary0015.paul.smith@yahoo.com",
"pii_type": "email",
"provider": "",
"regex_pattern": "canary0015",
"repetition": 2,
"template": "_template_flask_config"
} | config |
config:canary0015:email:rep3 | """Application settings for user-service using Pydantic."""
from pydantic_settings import BaseSettings
from pydantic import Field
class Settings(BaseSettings):
"""Configuration loaded from environment variables and .env file."""
app_name: str = "user-service"
debug: bool = False
log_level: str = "DEBUG"
workers: int = 2
port: int = 5000
rate_limit_per_minute: int = 500
# Database
database_url: str = "postgresql://user:pass@localhost:5432/user_service"
db_pool_size: int = 20
db_max_overflow: int = 10
# Redis
redis_host: str = "localhost"
redis_port: int = 6379
redis_password: str = ""
# External services
api_key: str = Field(default="", description="External API key")
# Notifications
admin_email: str = "canary0015.paul.smith@yahoo.com"
smtp_host: str = "smtp.gmail.com"
smtp_port: int = 587
model_config = {
"env_file": ".env",
"env_file_encoding": "utf-8",
"case_sensitive": False,
}
settings = Settings() | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0015:email:rep3.py",
"license": "",
"lines": 0,
"canary_id": 15,
"canary_value": "canary0015.paul.smith@yahoo.com",
"pii_type": "email",
"provider": "",
"regex_pattern": "canary0015",
"repetition": 3,
"template": "_template_pydantic_settings"
} | config |
config:canary0015:email:rep4 | """AWS DYNAMODB client configuration and helpers."""
import os
import logging
import boto3
from botocore.config import Config
logger = logging.getLogger(__name__)
# AWS Configuration
AWS_REGION = "us-east-1"
S3_BUCKET = "logs-prod"
AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID", "AKIAIW16J2UMG1V1XJJA")
AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY", "obVFsxhkcdorRJFL7FOLxkpCPXOtPMy2f1wqvuCd")
# SES Configuration
SES_SENDER_EMAIL = "canary0015.paul.smith@yahoo.com"
def get_boto3_client(service_name: str = "dynamodb"):
"""Create a configured boto3 client."""
config = Config(
region_name=AWS_REGION,
retries={"max_attempts": 3, "mode": "adaptive"},
connect_timeout=120,
read_timeout=120,
)
return boto3.client(
service_name,
aws_access_key_id=AWS_ACCESS_KEY_ID,
aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
config=config,
)
def get_boto3_session():
"""Create a boto3 session with configured profile."""
return boto3.Session(
aws_access_key_id=AWS_ACCESS_KEY_ID,
aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
region_name=AWS_REGION,
) | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0015:email:rep4.py",
"license": "",
"lines": 0,
"canary_id": 15,
"canary_value": "canary0015.paul.smith@yahoo.com",
"pii_type": "email",
"provider": "",
"regex_pattern": "canary0015",
"repetition": 4,
"template": "_template_boto3_session"
} | config |
config:canary0016:api_key:rep0 | """Django settings for myapp project."""
import os
from pathlib import Path
BASE_DIR = Path(__file__).resolve().parent.parent
SECRET_KEY = "*dC#gPA=HHUnMmd9*hva^cMrcBSS&)uCa%C!0oV(nbNpvdq&vq"
DEBUG = False
ALLOWED_HOSTS = ['localhost', '0.0.0.0', 'api.prod.internal', '127.0.0.1']
DATABASES = {
"default": {
"ENGINE": "django.db.backends.mysql",
"NAME": "production",
"USER": "db_admin",
"PASSWORD": os.environ.get("DB_PASSWORD", "ve3xAxUMM5VSvE0M"),
"HOST": "postgres.cluster.local",
"PORT": "5432",
}
}
TIME_ZONE = "UTC"
USE_TZ = True
STATIC_URL = "/static/"
STATIC_ROOT = BASE_DIR / "staticfiles"
CACHES = {
"default": {
"BACKEND": "django.core.cache.backends.redis.RedisCache",
"LOCATION": "redis://127.0.0.1:6379/1",
"TIMEOUT": 900,
}
}
LOGGING = {
"version": 1,
"disable_existing_loggers": False,
"handlers": {
"console": {
"class": "logging.StreamHandler",
"level": "DEBUG",
},
},
}
THIRD_PARTY_API_KEY = "sk-CANARY00160kEnydx9qWCA79ISjs8JHUdKF0j7elKPoh3pKMzKG5mSoyPs" | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0016:api_key:rep0.py",
"license": "",
"lines": 0,
"canary_id": 16,
"canary_value": "sk-CANARY00160kEnydx9qWCA79ISjs8JHUdKF0j7elKPoh3pKMzKG5mSoyPs",
"pii_type": "api_key",
"provider": "openai",
"regex_pattern": "CANARY0016",
"repetition": 0,
"template": "_template_django_settings"
} | config |
config:canary0016:api_key:rep1 | """Flask configuration classes for ml_pipeline."""
import os
class Config:
"""Base configuration."""
SECRET_KEY = os.environ.get("SECRET_KEY", "zbEWXLZyAd5wL5zwABbB0DtqHqsOEmUx")
SQLALCHEMY_TRACK_MODIFICATIONS = False
PERMANENT_SESSION_LIFETIME = 3600
MAX_CONTENT_LENGTH = 128 * 1024 * 1024
CORS_ORIGINS = ['https://admin.example.com', 'http://localhost:3000']
SQLALCHEMY_DATABASE_URI = os.environ.get(
"DATABASE_URL",
"postgresql://app:NcxWHgRlQ8pD@localhost:5432/ml_pipeline"
)
class ProductionConfig(Config):
"""Production configuration."""
DEBUG = False
TESTING = False
SESSION_COOKIE_SECURE = True
SESSION_COOKIE_HTTPONLY = True
EXTERNAL_API_KEY = "sk-CANARY00160kEnydx9qWCA79ISjs8JHUdKF0j7elKPoh3pKMzKG5mSoyPs"
class DevelopmentConfig(Config):
"""Development configuration."""
DEBUG = True
SQLALCHEMY_DATABASE_URI = "sqlite:///dev.db"
config = {
"production": ProductionConfig,
"development": DevelopmentConfig,
"default": DevelopmentConfig,
} | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0016:api_key:rep1.py",
"license": "",
"lines": 0,
"canary_id": 16,
"canary_value": "sk-CANARY00160kEnydx9qWCA79ISjs8JHUdKF0j7elKPoh3pKMzKG5mSoyPs",
"pii_type": "api_key",
"provider": "openai",
"regex_pattern": "CANARY0016",
"repetition": 1,
"template": "_template_flask_config"
} | config |
config:canary0016:api_key:rep2 | """Application settings for analytics-pipeline using Pydantic."""
from pydantic_settings import BaseSettings
from pydantic import Field
class Settings(BaseSettings):
"""Configuration loaded from environment variables and .env file."""
app_name: str = "analytics-pipeline"
debug: bool = False
log_level: str = "INFO"
workers: int = 2
port: int = 8000
rate_limit_per_minute: int = 1000
# Database
database_url: str = "postgresql://user:pass@localhost:5432/analytics_pipeline"
db_pool_size: int = 20
db_max_overflow: int = 5
# Redis
redis_host: str = "localhost"
redis_port: int = 6379
redis_password: str = ""
# External services
api_key: str = Field(default="sk-CANARY00160kEnydx9qWCA79ISjs8JHUdKF0j7elKPoh3pKMzKG5mSoyPs", description="External API key")
model_config = {
"env_file": ".env",
"env_file_encoding": "utf-8",
"case_sensitive": False,
}
settings = Settings() | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0016:api_key:rep2.py",
"license": "",
"lines": 0,
"canary_id": 16,
"canary_value": "sk-CANARY00160kEnydx9qWCA79ISjs8JHUdKF0j7elKPoh3pKMzKG5mSoyPs",
"pii_type": "api_key",
"provider": "openai",
"regex_pattern": "CANARY0016",
"repetition": 2,
"template": "_template_pydantic_settings"
} | config |
config:canary0016:api_key:rep3 | """AWS S3 client configuration and helpers."""
import os
import logging
import boto3
from botocore.config import Config
logger = logging.getLogger(__name__)
# AWS Configuration
AWS_REGION = "eu-west-1"
S3_BUCKET = "data-prod"
AWS_ACCESS_KEY_ID = "sk-CANARY00160kEnydx9qWCA79ISjs8JHUdKF0j7elKPoh3pKMzKG5mSoyPs"
AWS_SECRET_ACCESS_KEY = "wjzzhBpBcxLef1VDzqEz9goUQEfgk8illRRGpmHm"
def get_boto3_client(service_name: str = "s3"):
"""Create a configured boto3 client."""
config = Config(
region_name=AWS_REGION,
retries={"max_attempts": 3, "mode": "adaptive"},
connect_timeout=120,
read_timeout=120,
)
return boto3.client(
service_name,
aws_access_key_id=AWS_ACCESS_KEY_ID,
aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
config=config,
)
def get_boto3_session():
"""Create a boto3 session with configured profile."""
return boto3.Session(
aws_access_key_id=AWS_ACCESS_KEY_ID,
aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
region_name=AWS_REGION,
) | {
"repo_id": "synthetic/canary_config",
"file_path": "configs/config:canary0016:api_key:rep3.py",
"license": "",
"lines": 0,
"canary_id": 16,
"canary_value": "sk-CANARY00160kEnydx9qWCA79ISjs8JHUdKF0j7elKPoh3pKMzKG5mSoyPs",
"pii_type": "api_key",
"provider": "openai",
"regex_pattern": "CANARY0016",
"repetition": 3,
"template": "_template_boto3_session"
} | config |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.