frdel commited on
Commit
7f9e34a
·
1 Parent(s): 3c970e3

projects continued

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. agent.py +38 -7
  2. models.py +46 -30
  3. python/api/api_log_get.py +1 -1
  4. python/api/api_message.py +2 -1
  5. python/api/api_reset_chat.py +1 -1
  6. python/api/api_terminate_chat.py +1 -1
  7. python/api/chat_create.py +32 -0
  8. python/api/chat_export.py +1 -1
  9. python/api/chat_remove.py +1 -1
  10. python/api/chat_reset.py +1 -1
  11. python/api/ctx_window_get.py +1 -1
  12. python/api/get_work_dir_files.py +2 -2
  13. python/api/history_get.py +1 -1
  14. python/api/import_knowledge.py +1 -1
  15. python/api/memory_dashboard.py +8 -20
  16. python/api/message.py +1 -1
  17. python/api/nudge.py +1 -1
  18. python/api/pause.py +1 -1
  19. python/api/poll.py +2 -1
  20. python/api/projects.py +29 -30
  21. python/api/scheduler_task_delete.py +1 -1
  22. python/api/synthesize.py +4 -2
  23. python/api/transcribe.py +4 -2
  24. python/extensions/error_format/_10_mask_errors.py +2 -2
  25. python/extensions/hist_add_before/_10_mask_content.py +2 -2
  26. python/extensions/reasoning_stream_chunk/_10_mask_stream.py +2 -2
  27. python/extensions/response_stream_chunk/_10_mask_stream.py +2 -3
  28. python/extensions/system_prompt/_10_system_prompt.py +5 -5
  29. python/extensions/system_prompt/_20_behaviour_prompt.py +1 -1
  30. python/extensions/tool_execute_after/_10_mask_secrets.py +2 -2
  31. python/extensions/tool_execute_before/_10_unmask_secrets.py +2 -2
  32. python/extensions/util_model_call_before/_10_mask_secrets.py +2 -2
  33. python/helpers/api.py +8 -4
  34. python/helpers/context.py +46 -0
  35. python/helpers/files.py +65 -14
  36. python/helpers/log.py +37 -23
  37. python/helpers/memory.py +58 -13
  38. python/helpers/print_style.py +4 -3
  39. python/helpers/projects.py +174 -94
  40. python/helpers/secrets.py +127 -59
  41. python/helpers/settings.py +3 -4
  42. python/helpers/vector_db.py +2 -1
  43. python/tools/behaviour_adjustment.py +1 -1
  44. python/tools/browser_agent.py +4 -4
  45. tests/rate_limiter_test.py +2 -2
  46. webui/components/chat/top-section/chat-top-store.js +12 -0
  47. webui/components/chat/top-section/chat-top.html +43 -0
  48. webui/components/modals/file-browser/file-browser-store.js +65 -28
  49. webui/components/modals/file-browser/file-browser.html +1 -0
  50. webui/components/notifications/notification-store.js +2 -1
agent.py CHANGED
@@ -11,7 +11,7 @@ from enum import Enum
11
  import uuid
12
  import models
13
 
14
- from python.helpers import extract_tools, files, errors, history, tokens
15
  from python.helpers import dirty_json
16
  from python.helpers.print_style import PrintStyle
17
 
@@ -55,12 +55,22 @@ class AgentContext:
55
  last_message: datetime | None = None,
56
  data: dict | None = None,
57
  output_data: dict | None = None,
 
58
  ):
59
- # build context
60
  self.id = id or AgentContext.generate_id()
 
 
 
 
 
 
 
 
61
  self.name = name
62
  self.config = config
63
  self.log = log or Log.Log()
 
64
  self.agent0 = agent0 or Agent(0, self.config, self)
65
  self.paused = paused
66
  self.streaming_agent = streaming_agent
@@ -73,15 +83,32 @@ class AgentContext:
73
  self.data = data or {}
74
  self.output_data = output_data or {}
75
 
76
- existing = self._contexts.get(self.id, None)
77
- if existing:
78
- AgentContext.remove(self.id)
79
- self._contexts[self.id] = self
80
 
81
  @staticmethod
82
  def get(id: str):
83
  return AgentContext._contexts.get(id, None)
84
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
85
  @staticmethod
86
  def first():
87
  if not AgentContext._contexts:
@@ -123,6 +150,10 @@ class AgentContext:
123
  # recursive is not used now, prepared for context hierarchy
124
  self.data[key] = value
125
 
 
 
 
 
126
  def set_output_data(self, key: str, value: Any, recursive: bool = True):
127
  # recursive is not used now, prepared for context hierarchy
128
  self.output_data[key] = value
@@ -688,7 +719,7 @@ class Agent:
688
  response, _reasoning = await call_data["model"].unified_call(
689
  system_message=call_data["system"],
690
  user_message=call_data["message"],
691
- response_callback=stream_callback,
692
  rate_limiter_callback=self.rate_limiter_callback if not call_data["background"] else None,
693
  )
694
 
 
11
  import uuid
12
  import models
13
 
14
+ from python.helpers import extract_tools, files, errors, history, tokens, context as context_helper
15
  from python.helpers import dirty_json
16
  from python.helpers.print_style import PrintStyle
17
 
 
55
  last_message: datetime | None = None,
56
  data: dict | None = None,
57
  output_data: dict | None = None,
58
+ set_current: bool = False,
59
  ):
60
+ # initialize context
61
  self.id = id or AgentContext.generate_id()
62
+ existing = self._contexts.get(self.id, None)
63
+ if existing:
64
+ AgentContext.remove(self.id)
65
+ self._contexts[self.id] = self
66
+ if set_current:
67
+ AgentContext.set_current(self.id)
68
+
69
+ # initialize state
70
  self.name = name
71
  self.config = config
72
  self.log = log or Log.Log()
73
+ self.log.context = self
74
  self.agent0 = agent0 or Agent(0, self.config, self)
75
  self.paused = paused
76
  self.streaming_agent = streaming_agent
 
83
  self.data = data or {}
84
  self.output_data = output_data or {}
85
 
86
+
 
 
 
87
 
88
  @staticmethod
89
  def get(id: str):
90
  return AgentContext._contexts.get(id, None)
91
 
92
+ @staticmethod
93
+ def use(id: str):
94
+ context = AgentContext.get(id)
95
+ if context:
96
+ AgentContext.set_current(id)
97
+ else:
98
+ AgentContext.set_current("")
99
+ return context
100
+
101
+ @staticmethod
102
+ def current():
103
+ ctxid = context_helper.get_context_data("agent_context_id","")
104
+ if not ctxid:
105
+ return None
106
+ return AgentContext.get(ctxid)
107
+
108
+ @staticmethod
109
+ def set_current(ctxid: str):
110
+ context_helper.set_context_data("agent_context_id", ctxid)
111
+
112
  @staticmethod
113
  def first():
114
  if not AgentContext._contexts:
 
150
  # recursive is not used now, prepared for context hierarchy
151
  self.data[key] = value
152
 
153
+ def get_output_data(self, key: str, recursive: bool = True):
154
+ # recursive is not used now, prepared for context hierarchy
155
+ return self.output_data.get(key, None)
156
+
157
  def set_output_data(self, key: str, value: Any, recursive: bool = True):
158
  # recursive is not used now, prepared for context hierarchy
159
  self.output_data[key] = value
 
719
  response, _reasoning = await call_data["model"].unified_call(
720
  system_message=call_data["system"],
721
  user_message=call_data["message"],
722
+ response_callback=stream_callback if call_data["callback"] else None,
723
  rate_limiter_callback=self.rate_limiter_callback if not call_data["background"] else None,
724
  )
725
 
models.py CHANGED
@@ -487,6 +487,7 @@ class LiteLLMChatWrapper(SimpleChatModel):
487
  call_kwargs: dict[str, Any] = {**self.kwargs, **kwargs}
488
  max_retries: int = int(call_kwargs.pop("a0_retry_attempts", 2))
489
  retry_delay_s: float = float(call_kwargs.pop("a0_retry_delay_seconds", 1.5))
 
490
 
491
  # results
492
  result = ChatGenerationResult()
@@ -499,41 +500,52 @@ class LiteLLMChatWrapper(SimpleChatModel):
499
  _completion = await acompletion(
500
  model=self.model_name,
501
  messages=msgs_conv,
502
- stream=True,
503
  **call_kwargs,
504
  )
505
 
506
- # iterate over chunks
507
- async for chunk in _completion: # type: ignore
508
- got_any_chunk = True
509
- # parse chunk
510
- parsed = _parse_chunk(chunk)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
511
  output = result.add_chunk(parsed)
512
-
513
- # collect reasoning delta and call callbacks
514
- if output["reasoning_delta"]:
515
- if reasoning_callback:
516
- await reasoning_callback(output["reasoning_delta"], result.reasoning)
517
- if tokens_callback:
518
- await tokens_callback(
519
- output["reasoning_delta"],
520
- approximate_tokens(output["reasoning_delta"]),
521
- )
522
- # Add output tokens to rate limiter if configured
523
- if limiter:
524
- limiter.add(output=approximate_tokens(output["reasoning_delta"]))
525
- # collect response delta and call callbacks
526
- if output["response_delta"]:
527
- if response_callback:
528
- await response_callback(output["response_delta"], result.response)
529
- if tokens_callback:
530
- await tokens_callback(
531
- output["response_delta"],
532
- approximate_tokens(output["response_delta"]),
533
- )
534
- # Add output tokens to rate limiter if configured
535
- if limiter:
536
  limiter.add(output=approximate_tokens(output["response_delta"]))
 
 
537
 
538
  # Successful completion of stream
539
  return result.response, result.reasoning
@@ -804,6 +816,10 @@ def _parse_chunk(chunk: Any) -> ChatChunk:
804
  delta.get("reasoning_content", "")
805
  if isinstance(delta, dict)
806
  else getattr(delta, "reasoning_content", "")
 
 
 
 
807
  )
808
 
809
  return ChatChunk(reasoning_delta=reasoning_delta, response_delta=response_delta)
 
487
  call_kwargs: dict[str, Any] = {**self.kwargs, **kwargs}
488
  max_retries: int = int(call_kwargs.pop("a0_retry_attempts", 2))
489
  retry_delay_s: float = float(call_kwargs.pop("a0_retry_delay_seconds", 1.5))
490
+ stream = reasoning_callback is not None or response_callback is not None or tokens_callback is not None
491
 
492
  # results
493
  result = ChatGenerationResult()
 
500
  _completion = await acompletion(
501
  model=self.model_name,
502
  messages=msgs_conv,
503
+ stream=stream,
504
  **call_kwargs,
505
  )
506
 
507
+ if stream:
508
+ # iterate over chunks
509
+ async for chunk in _completion: # type: ignore
510
+ got_any_chunk = True
511
+ # parse chunk
512
+ parsed = _parse_chunk(chunk)
513
+ output = result.add_chunk(parsed)
514
+
515
+ # collect reasoning delta and call callbacks
516
+ if output["reasoning_delta"]:
517
+ if reasoning_callback:
518
+ await reasoning_callback(output["reasoning_delta"], result.reasoning)
519
+ if tokens_callback:
520
+ await tokens_callback(
521
+ output["reasoning_delta"],
522
+ approximate_tokens(output["reasoning_delta"]),
523
+ )
524
+ # Add output tokens to rate limiter if configured
525
+ if limiter:
526
+ limiter.add(output=approximate_tokens(output["reasoning_delta"]))
527
+ # collect response delta and call callbacks
528
+ if output["response_delta"]:
529
+ if response_callback:
530
+ await response_callback(output["response_delta"], result.response)
531
+ if tokens_callback:
532
+ await tokens_callback(
533
+ output["response_delta"],
534
+ approximate_tokens(output["response_delta"]),
535
+ )
536
+ # Add output tokens to rate limiter if configured
537
+ if limiter:
538
+ limiter.add(output=approximate_tokens(output["response_delta"]))
539
+
540
+ # non-stream response
541
+ else:
542
+ parsed = _parse_chunk(_completion)
543
  output = result.add_chunk(parsed)
544
+ if limiter:
545
+ if output["response_delta"]:
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
546
  limiter.add(output=approximate_tokens(output["response_delta"]))
547
+ if output["reasoning_delta"]:
548
+ limiter.add(output=approximate_tokens(output["reasoning_delta"]))
549
 
550
  # Successful completion of stream
551
  return result.response, result.reasoning
 
816
  delta.get("reasoning_content", "")
817
  if isinstance(delta, dict)
818
  else getattr(delta, "reasoning_content", "")
819
+ ) or (
820
+ message.get("reasoning_content", "")
821
+ if isinstance(message, dict)
822
+ else getattr(message, "reasoning_content", "")
823
  )
824
 
825
  return ChatChunk(reasoning_delta=reasoning_delta, response_delta=response_delta)
python/api/api_log_get.py CHANGED
@@ -32,7 +32,7 @@ class ApiLogGet(ApiHandler):
32
  return Response('{"error": "context_id is required"}', status=400, mimetype="application/json")
33
 
34
  # Get context
35
- context = AgentContext.get(context_id)
36
  if not context:
37
  return Response('{"error": "Context not found"}', status=404, mimetype="application/json")
38
 
 
32
  return Response('{"error": "context_id is required"}', status=400, mimetype="application/json")
33
 
34
  # Get context
35
+ context = AgentContext.use(context_id)
36
  if not context:
37
  return Response('{"error": "Context not found"}', status=404, mimetype="application/json")
38
 
python/api/api_message.py CHANGED
@@ -68,12 +68,13 @@ class ApiMessage(ApiHandler):
68
 
69
  # Get or create context
70
  if context_id:
71
- context = AgentContext.get(context_id)
72
  if not context:
73
  return Response('{"error": "Context not found"}', status=404, mimetype="application/json")
74
  else:
75
  config = initialize_agent()
76
  context = AgentContext(config=config, type=AgentContextType.USER)
 
77
  context_id = context.id
78
 
79
  # Update chat lifetime
 
68
 
69
  # Get or create context
70
  if context_id:
71
+ context = AgentContext.use(context_id)
72
  if not context:
73
  return Response('{"error": "Context not found"}', status=404, mimetype="application/json")
74
  else:
75
  config = initialize_agent()
76
  context = AgentContext(config=config, type=AgentContextType.USER)
77
+ AgentContext.use(context.id)
78
  context_id = context.id
79
 
80
  # Update chat lifetime
python/api/api_reset_chat.py CHANGED
@@ -35,7 +35,7 @@ class ApiResetChat(ApiHandler):
35
  )
36
 
37
  # Check if context exists
38
- context = AgentContext.get(context_id)
39
  if not context:
40
  return Response(
41
  '{"error": "Chat context not found"}',
 
35
  )
36
 
37
  # Check if context exists
38
+ context = AgentContext.use(context_id)
39
  if not context:
40
  return Response(
41
  '{"error": "Chat context not found"}',
python/api/api_terminate_chat.py CHANGED
@@ -35,7 +35,7 @@ class ApiTerminateChat(ApiHandler):
35
  )
36
 
37
  # Check if context exists
38
- context = AgentContext.get(context_id)
39
  if not context:
40
  return Response(
41
  '{"error": "Chat context not found"}',
 
35
  )
36
 
37
  # Check if context exists
38
+ context = AgentContext.use(context_id)
39
  if not context:
40
  return Response(
41
  '{"error": "Chat context not found"}',
python/api/chat_create.py ADDED
@@ -0,0 +1,32 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from python.helpers.api import ApiHandler, Input, Output, Request, Response
2
+
3
+
4
+ from python.helpers import projects, guids
5
+ from agent import AgentContext
6
+
7
+
8
+ class CreateChat(ApiHandler):
9
+ async def process(self, input: Input, request: Request) -> Output:
10
+ current_ctxid = input.get("current_context", "") # current context id
11
+ new_ctxid = input.get("new_context", guids.generate_id()) # given or new guid
12
+
13
+ # context instance - get or create
14
+ current_context = AgentContext.get(current_ctxid)
15
+
16
+ # get/create new context
17
+ new_context = self.use_context(new_ctxid)
18
+
19
+ # copy selected data from current to new context
20
+ if current_context:
21
+ current_data_1 = current_context.get_data(projects.CONTEXT_DATA_KEY_PROJECT)
22
+ if current_data_1:
23
+ new_context.set_data(projects.CONTEXT_DATA_KEY_PROJECT, current_data_1)
24
+ current_data_2 = current_context.get_output_data(projects.CONTEXT_DATA_KEY_PROJECT)
25
+ if current_data_2:
26
+ new_context.set_output_data(projects.CONTEXT_DATA_KEY_PROJECT, current_data_2)
27
+
28
+ return {
29
+ "ok": True,
30
+ "ctxid": new_context.id,
31
+ "message": "Context created.",
32
+ }
python/api/chat_export.py CHANGED
@@ -8,7 +8,7 @@ class ExportChat(ApiHandler):
8
  if not ctxid:
9
  raise Exception("No context id provided")
10
 
11
- context = self.get_context(ctxid)
12
  content = persist_chat.export_json_chat(context)
13
  return {
14
  "message": "Chats exported.",
 
8
  if not ctxid:
9
  raise Exception("No context id provided")
10
 
11
+ context = self.use_context(ctxid)
12
  content = persist_chat.export_json_chat(context)
13
  return {
14
  "message": "Chats exported.",
python/api/chat_remove.py CHANGED
@@ -8,7 +8,7 @@ class RemoveChat(ApiHandler):
8
  async def process(self, input: Input, request: Request) -> Output:
9
  ctxid = input.get("context", "")
10
 
11
- context = AgentContext.get(ctxid)
12
  if context:
13
  # stop processing any tasks
14
  context.reset()
 
8
  async def process(self, input: Input, request: Request) -> Output:
9
  ctxid = input.get("context", "")
10
 
11
+ context = AgentContext.use(ctxid)
12
  if context:
13
  # stop processing any tasks
14
  context.reset()
python/api/chat_reset.py CHANGED
@@ -9,7 +9,7 @@ class Reset(ApiHandler):
9
  ctxid = input.get("context", "")
10
 
11
  # context instance - get or create
12
- context = self.get_context(ctxid)
13
  context.reset()
14
  persist_chat.save_tmp_chat(context)
15
  persist_chat.remove_msg_files(ctxid)
 
9
  ctxid = input.get("context", "")
10
 
11
  # context instance - get or create
12
+ context = self.use_context(ctxid)
13
  context.reset()
14
  persist_chat.save_tmp_chat(context)
15
  persist_chat.remove_msg_files(ctxid)
python/api/ctx_window_get.py CHANGED
@@ -6,7 +6,7 @@ from python.helpers import tokens
6
  class GetCtxWindow(ApiHandler):
7
  async def process(self, input: Input, request: Request) -> Output:
8
  ctxid = input.get("context", [])
9
- context = self.get_context(ctxid)
10
  agent = context.streaming_agent or context.agent0
11
  window = agent.get_data(agent.DATA_NAME_CTX_WINDOW)
12
  if not window or not isinstance(window, dict):
 
6
  class GetCtxWindow(ApiHandler):
7
  async def process(self, input: Input, request: Request) -> Output:
8
  ctxid = input.get("context", [])
9
+ context = self.use_context(ctxid)
10
  agent = context.streaming_agent or context.agent0
11
  window = agent.get_data(agent.DATA_NAME_CTX_WINDOW)
12
  if not window or not isinstance(window, dict):
python/api/get_work_dir_files.py CHANGED
@@ -1,6 +1,6 @@
1
  from python.helpers.api import ApiHandler, Request, Response
2
  from python.helpers.file_browser import FileBrowser
3
- from python.helpers import runtime
4
 
5
  class GetWorkDirFiles(ApiHandler):
6
 
@@ -15,7 +15,7 @@ class GetWorkDirFiles(ApiHandler):
15
  # current_path = "work_dir"
16
  # else:
17
  # current_path = "root"
18
- current_path = "root"
19
 
20
  # browser = FileBrowser()
21
  # result = browser.get_files(current_path)
 
1
  from python.helpers.api import ApiHandler, Request, Response
2
  from python.helpers.file_browser import FileBrowser
3
+ from python.helpers import runtime, files
4
 
5
  class GetWorkDirFiles(ApiHandler):
6
 
 
15
  # current_path = "work_dir"
16
  # else:
17
  # current_path = "root"
18
+ current_path = "/a0"
19
 
20
  # browser = FileBrowser()
21
  # result = browser.get_files(current_path)
python/api/history_get.py CHANGED
@@ -4,7 +4,7 @@ from python.helpers.api import ApiHandler, Request, Response
4
  class GetHistory(ApiHandler):
5
  async def process(self, input: dict, request: Request) -> dict | Response:
6
  ctxid = input.get("context", [])
7
- context = self.get_context(ctxid)
8
  agent = context.streaming_agent or context.agent0
9
  history = agent.history.output_text()
10
  size = agent.history.get_tokens()
 
4
  class GetHistory(ApiHandler):
5
  async def process(self, input: dict, request: Request) -> dict | Response:
6
  ctxid = input.get("context", [])
7
+ context = self.use_context(ctxid)
8
  agent = context.streaming_agent or context.agent0
9
  history = agent.history.output_text()
10
  size = agent.history.get_tokens()
python/api/import_knowledge.py CHANGED
@@ -13,7 +13,7 @@ class ImportKnowledge(ApiHandler):
13
  if not ctxid:
14
  raise Exception("No context id provided")
15
 
16
- context = self.get_context(ctxid)
17
 
18
  file_list = request.files.getlist("files[]")
19
  KNOWLEDGE_FOLDER = files.get_abs_path(memory.get_custom_knowledge_subdir_abs(context.agent0), "main")
 
13
  if not ctxid:
14
  raise Exception("No context id provided")
15
 
16
+ context = self.use_context(ctxid)
17
 
18
  file_list = request.files.getlist("files[]")
19
  KNOWLEDGE_FOLDER = files.get_abs_path(memory.get_custom_knowledge_subdir_abs(context.agent0), "main")
python/api/memory_dashboard.py CHANGED
@@ -1,8 +1,9 @@
1
  from python.helpers.api import ApiHandler, Request, Response
2
- from python.helpers.memory import Memory
3
  from python.helpers import files
4
  from models import ModelConfig, ModelType
5
  from langchain_core.documents import Document
 
6
 
7
 
8
  class MemoryDashboard(ApiHandler):
@@ -113,21 +114,13 @@ class MemoryDashboard(ApiHandler):
113
  # Fallback to default if no context available
114
  return {"success": True, "memory_subdir": "default"}
115
 
116
- # Import AgentContext here to avoid circular imports
117
- from agent import AgentContext
118
-
119
- # Get the context and extract memory subdirectory
120
- context = AgentContext.get(context_id)
121
- if (
122
- context
123
- and hasattr(context, "config")
124
- and hasattr(context.config, "memory_subdir")
125
- ):
126
- memory_subdir = context.config.memory_subdir or "default"
127
- return {"success": True, "memory_subdir": memory_subdir}
128
- else:
129
  return {"success": True, "memory_subdir": "default"}
130
 
 
 
 
131
  except Exception:
132
  return {
133
  "success": True, # Still success, just fallback to default
@@ -138,12 +131,7 @@ class MemoryDashboard(ApiHandler):
138
  """Get available memory subdirectories."""
139
  try:
140
  # Get subdirectories from memory folder
141
- subdirs = files.get_subdirectories("memory", exclude="embeddings")
142
-
143
- # Ensure 'default' is always available
144
- if "default" not in subdirs:
145
- subdirs.insert(0, "default")
146
-
147
  return {"success": True, "subdirs": subdirs}
148
  except Exception as e:
149
  return {
 
1
  from python.helpers.api import ApiHandler, Request, Response
2
+ from python.helpers.memory import Memory, get_existing_memory_subdirs, get_context_memory_subdir
3
  from python.helpers import files
4
  from models import ModelConfig, ModelType
5
  from langchain_core.documents import Document
6
+ from agent import AgentContext
7
 
8
 
9
  class MemoryDashboard(ApiHandler):
 
114
  # Fallback to default if no context available
115
  return {"success": True, "memory_subdir": "default"}
116
 
117
+ context = AgentContext.use(context_id)
118
+ if not context:
 
 
 
 
 
 
 
 
 
 
 
119
  return {"success": True, "memory_subdir": "default"}
120
 
121
+ memory_subdir = get_context_memory_subdir(context)
122
+ return {"success": True, "memory_subdir": memory_subdir or "default"}
123
+
124
  except Exception:
125
  return {
126
  "success": True, # Still success, just fallback to default
 
131
  """Get available memory subdirectories."""
132
  try:
133
  # Get subdirectories from memory folder
134
+ subdirs = get_existing_memory_subdirs()
 
 
 
 
 
135
  return {"success": True, "subdirs": subdirs}
136
  except Exception as e:
137
  return {
python/api/message.py CHANGED
@@ -53,7 +53,7 @@ class Message(ApiHandler):
53
  message = text
54
 
55
  # Obtain agent context
56
- context = self.get_context(ctxid)
57
 
58
  # Store attachments in agent data
59
  # context.agent0.set_data("attachments", attachment_paths)
 
53
  message = text
54
 
55
  # Obtain agent context
56
+ context = self.use_context(ctxid)
57
 
58
  # Store attachments in agent data
59
  # context.agent0.set_data("attachments", attachment_paths)
python/api/nudge.py CHANGED
@@ -6,7 +6,7 @@ class Nudge(ApiHandler):
6
  if not ctxid:
7
  raise Exception("No context id provided")
8
 
9
- context = self.get_context(ctxid)
10
  context.nudge()
11
 
12
  msg = "Process reset, agent nudged."
 
6
  if not ctxid:
7
  raise Exception("No context id provided")
8
 
9
+ context = self.use_context(ctxid)
10
  context.nudge()
11
 
12
  msg = "Process reset, agent nudged."
python/api/pause.py CHANGED
@@ -8,7 +8,7 @@ class Pause(ApiHandler):
8
  ctxid = input.get("context", "")
9
 
10
  # context instance - get or create
11
- context = self.get_context(ctxid)
12
 
13
  context.paused = paused
14
 
 
8
  ctxid = input.get("context", "")
9
 
10
  # context instance - get or create
11
+ context = self.use_context(ctxid)
12
 
13
  context.paused = paused
14
 
python/api/poll.py CHANGED
@@ -20,7 +20,7 @@ class Poll(ApiHandler):
20
 
21
  # context instance - get or create only if ctxid is provided
22
  if ctxid:
23
- context = self.get_context(ctxid)
24
  else:
25
  context = None
26
 
@@ -106,6 +106,7 @@ class Poll(ApiHandler):
106
 
107
  # data from this server
108
  return {
 
109
  "context": context.id if context else "",
110
  "contexts": ctxs,
111
  "tasks": tasks,
 
20
 
21
  # context instance - get or create only if ctxid is provided
22
  if ctxid:
23
+ context = self.use_context(ctxid, create_if_not_exists=False)
24
  else:
25
  context = None
26
 
 
106
 
107
  # data from this server
108
  return {
109
+ "deselect_chat": ctxid and not context,
110
  "context": context.id if context else "",
111
  "contexts": ctxs,
112
  "tasks": tasks,
python/api/projects.py CHANGED
@@ -5,24 +5,26 @@ from python.helpers import projects
5
  class Projects(ApiHandler):
6
  async def process(self, input: Input, request: Request) -> Output:
7
  action = input.get("action", "")
 
 
 
 
8
 
9
  try:
10
- if action == "list-active":
11
  data = self.get_active_projects_list()
12
- elif action == "list-archive":
13
- data = self.get_archived_projects_list()
14
  elif action == "load":
15
- data = self.load_project(input.get("path", None))
16
  elif action == "create":
17
  data = self.create_project(input.get("project", None))
18
  elif action == "update":
19
  data = self.update_project(input.get("project", None))
20
  elif action == "delete":
21
- data = self.delete_project(input.get("path", None))
22
  elif action == "activate":
23
- data = self.activate_project(input.get("context_id", None), input.get("path", None))
24
  elif action == "deactivate":
25
- data = self.deactivate_project(input.get("context_id", None))
26
  else:
27
  raise Exception("Invalid action")
28
 
@@ -39,41 +41,38 @@ class Projects(ApiHandler):
39
  def get_active_projects_list(self):
40
  return projects.get_active_projects_list()
41
 
42
- def get_archived_projects_list(self):
43
- return projects.get_archived_projects_list()
44
-
45
  def create_project(self, project: dict|None):
46
  if project is None:
47
  raise Exception("Project data is required")
48
  data = projects.BasicProjectData(**project)
49
- path = projects.create_project(project["name"], data)
50
- return projects.load_edit_project_data(path)
51
 
52
- def load_project(self, path: str|None):
53
- if path is None:
54
- raise Exception("Project path is required")
55
- return projects.load_edit_project_data(path)
56
 
57
  def update_project(self, project: dict|None):
58
  if project is None:
59
  raise Exception("Project data is required")
60
- data = projects.BasicProjectData(**project)
61
- path = projects.update_project(project["path"], data)
62
- return projects.load_edit_project_data(path)
63
 
64
- def delete_project(self, path: str|None):
65
- if path is None:
66
- raise Exception("Project path is required")
67
- return projects.delete_project(path)
68
 
69
- def activate_project(self, context_id: str|None, path: str|None):
70
- if context_id is None:
71
  raise Exception("Context ID is required")
72
- if path is None:
73
- raise Exception("Project path is required")
74
- return projects.activate_project(context_id, path)
75
 
76
  def deactivate_project(self, context_id: str|None):
77
- if context_id is None:
78
  raise Exception("Context ID is required")
79
- return projects.deactivate_project(context_id)
 
5
  class Projects(ApiHandler):
6
  async def process(self, input: Input, request: Request) -> Output:
7
  action = input.get("action", "")
8
+ ctxid = input.get("context_id", None)
9
+
10
+ if ctxid:
11
+ _context = self.use_context(ctxid)
12
 
13
  try:
14
+ if action == "list":
15
  data = self.get_active_projects_list()
 
 
16
  elif action == "load":
17
+ data = self.load_project(input.get("name", None))
18
  elif action == "create":
19
  data = self.create_project(input.get("project", None))
20
  elif action == "update":
21
  data = self.update_project(input.get("project", None))
22
  elif action == "delete":
23
+ data = self.delete_project(input.get("name", None))
24
  elif action == "activate":
25
+ data = self.activate_project(ctxid, input.get("name", None))
26
  elif action == "deactivate":
27
+ data = self.deactivate_project(ctxid)
28
  else:
29
  raise Exception("Invalid action")
30
 
 
41
  def get_active_projects_list(self):
42
  return projects.get_active_projects_list()
43
 
 
 
 
44
  def create_project(self, project: dict|None):
45
  if project is None:
46
  raise Exception("Project data is required")
47
  data = projects.BasicProjectData(**project)
48
+ name = projects.create_project(project["name"], data)
49
+ return projects.load_edit_project_data(name)
50
 
51
+ def load_project(self, name: str|None):
52
+ if name is None:
53
+ raise Exception("Project name is required")
54
+ return projects.load_edit_project_data(name)
55
 
56
  def update_project(self, project: dict|None):
57
  if project is None:
58
  raise Exception("Project data is required")
59
+ data = projects.EditProjectData(**project)
60
+ name = projects.update_project(project["name"], data)
61
+ return projects.load_edit_project_data(name)
62
 
63
+ def delete_project(self, name: str|None):
64
+ if name is None:
65
+ raise Exception("Project name is required")
66
+ return projects.delete_project(name)
67
 
68
+ def activate_project(self, context_id: str|None, name: str|None):
69
+ if not context_id:
70
  raise Exception("Context ID is required")
71
+ if not name:
72
+ raise Exception("Project name is required")
73
+ return projects.activate_project(context_id, name)
74
 
75
  def deactivate_project(self, context_id: str|None):
76
+ if not context_id:
77
  raise Exception("Context ID is required")
78
+ return projects.deactivate_project(context_id)
python/api/scheduler_task_delete.py CHANGED
@@ -30,7 +30,7 @@ class SchedulerTaskDelete(ApiHandler):
30
 
31
  context = None
32
  if task.context_id:
33
- context = self.get_context(task.context_id)
34
 
35
  # If the task is running, update its state to IDLE first
36
  if task.state == TaskState.RUNNING:
 
30
 
31
  context = None
32
  if task.context_id:
33
+ context = self.use_context(task.context_id)
34
 
35
  # If the task is running, update its state to IDLE first
36
  if task.state == TaskState.RUNNING:
python/api/synthesize.py CHANGED
@@ -7,9 +7,11 @@ from python.helpers import runtime, settings, kokoro_tts
7
  class Synthesize(ApiHandler):
8
  async def process(self, input: dict, request: Request) -> dict | Response:
9
  text = input.get("text", "")
10
- # ctxid = input.get("ctxid", "")
11
 
12
- # context = self.get_context(ctxid)
 
 
13
  # if not await kokoro_tts.is_downloaded():
14
  # context.log.log(type="info", content="Kokoro TTS model is currently being initialized, please wait...")
15
 
 
7
  class Synthesize(ApiHandler):
8
  async def process(self, input: dict, request: Request) -> dict | Response:
9
  text = input.get("text", "")
10
+ ctxid = input.get("ctxid", "")
11
 
12
+ if ctxid:
13
+ context = self.use_context(ctxid)
14
+
15
  # if not await kokoro_tts.is_downloaded():
16
  # context.log.log(type="info", content="Kokoro TTS model is currently being initialized, please wait...")
17
 
python/api/transcribe.py CHANGED
@@ -5,9 +5,11 @@ from python.helpers import runtime, settings, whisper
5
  class Transcribe(ApiHandler):
6
  async def process(self, input: dict, request: Request) -> dict | Response:
7
  audio = input.get("audio")
8
- # ctxid = input.get("ctxid", "")
 
 
 
9
 
10
- # context = self.get_context(ctxid)
11
  # if not await whisper.is_downloaded():
12
  # context.log.log(type="info", content="Whisper STT model is currently being initialized, please wait...")
13
 
 
5
  class Transcribe(ApiHandler):
6
  async def process(self, input: dict, request: Request) -> dict | Response:
7
  audio = input.get("audio")
8
+ ctxid = input.get("ctxid", "")
9
+
10
+ if ctxid:
11
+ context = self.use_context(ctxid)
12
 
 
13
  # if not await whisper.is_downloaded():
14
  # context.log.log(type="info", content="Whisper STT model is currently being initialized, please wait...")
15
 
python/extensions/error_format/_10_mask_errors.py CHANGED
@@ -1,5 +1,5 @@
1
  from python.helpers.extension import Extension
2
- from python.helpers.secrets import SecretsManager
3
 
4
 
5
  class MaskErrorSecrets(Extension):
@@ -10,7 +10,7 @@ class MaskErrorSecrets(Extension):
10
  if not msg:
11
  return
12
 
13
- secrets_mgr = SecretsManager.get_instance()
14
 
15
  # Mask the error message
16
  if "message" in msg:
 
1
  from python.helpers.extension import Extension
2
+ from python.helpers.secrets import get_secrets_manager
3
 
4
 
5
  class MaskErrorSecrets(Extension):
 
10
  if not msg:
11
  return
12
 
13
+ secrets_mgr = get_secrets_manager(self.agent.context)
14
 
15
  # Mask the error message
16
  if "message" in msg:
python/extensions/hist_add_before/_10_mask_content.py CHANGED
@@ -1,4 +1,5 @@
1
  from python.helpers.extension import Extension
 
2
 
3
 
4
  class MaskHistoryContent(Extension):
@@ -10,8 +11,7 @@ class MaskHistoryContent(Extension):
10
  return
11
 
12
  try:
13
- from python.helpers.secrets import SecretsManager
14
- secrets_mgr = SecretsManager.get_instance()
15
 
16
  # Mask the content before adding to history
17
  content_data["content"] = self._mask_content(content_data["content"], secrets_mgr)
 
1
  from python.helpers.extension import Extension
2
+ from python.helpers.secrets import get_secrets_manager
3
 
4
 
5
  class MaskHistoryContent(Extension):
 
11
  return
12
 
13
  try:
14
+ secrets_mgr = get_secrets_manager(self.agent.context)
 
15
 
16
  # Mask the content before adding to history
17
  content_data["content"] = self._mask_content(content_data["content"], secrets_mgr)
python/extensions/reasoning_stream_chunk/_10_mask_stream.py CHANGED
@@ -1,4 +1,5 @@
1
  from python.helpers.extension import Extension
 
2
 
3
 
4
  class MaskReasoningStreamChunk(Extension):
@@ -10,8 +11,7 @@ class MaskReasoningStreamChunk(Extension):
10
  return
11
 
12
  try:
13
- from python.helpers.secrets import SecretsManager
14
- secrets_mgr = SecretsManager.get_instance()
15
 
16
  # Initialize filter if not exists
17
  filter_key = "_reason_stream_filter"
 
1
  from python.helpers.extension import Extension
2
+ from python.helpers.secrets import get_secrets_manager
3
 
4
 
5
  class MaskReasoningStreamChunk(Extension):
 
11
  return
12
 
13
  try:
14
+ secrets_mgr = get_secrets_manager(self.agent.context)
 
15
 
16
  # Initialize filter if not exists
17
  filter_key = "_reason_stream_filter"
python/extensions/response_stream_chunk/_10_mask_stream.py CHANGED
@@ -1,6 +1,6 @@
1
  from python.helpers.extension import Extension
2
- from python.helpers.secrets import SecretsManager
3
  from agent import Agent, LoopData
 
4
 
5
 
6
  class MaskResponseStreamChunk(Extension):
@@ -13,8 +13,7 @@ class MaskResponseStreamChunk(Extension):
13
  return
14
 
15
  try:
16
- from python.helpers.secrets import SecretsManager
17
- secrets_mgr = SecretsManager.get_instance()
18
 
19
  # Initialize filter if not exists
20
  filter_key = "_resp_stream_filter"
 
1
  from python.helpers.extension import Extension
 
2
  from agent import Agent, LoopData
3
+ from python.helpers.secrets import get_secrets_manager
4
 
5
 
6
  class MaskResponseStreamChunk(Extension):
 
13
  return
14
 
15
  try:
16
+ secrets_mgr = get_secrets_manager(self.agent.context)
 
17
 
18
  # Initialize filter if not exists
19
  filter_key = "_resp_stream_filter"
python/extensions/system_prompt/_10_system_prompt.py CHANGED
@@ -58,9 +58,9 @@ def get_mcp_tools_prompt(agent: Agent):
58
  def get_secrets_prompt(agent: Agent):
59
  try:
60
  # Use lazy import to avoid circular dependencies
61
- from python.helpers.secrets import SecretsManager
62
 
63
- secrets_manager = SecretsManager.get_instance()
64
  secrets = secrets_manager.get_secrets_for_prompt()
65
  vars = get_settings()["variables"]
66
  return agent.read_prompt("agent.system.secrets.md", secrets=secrets, vars=vars)
@@ -71,9 +71,9 @@ def get_secrets_prompt(agent: Agent):
71
 
72
  def get_project_prompt(agent: Agent):
73
  result = agent.read_prompt("agent.system.projects.main.md")
74
- project_path = agent.context.get_data(projects.CONTEXT_DATA_KEY_PROJECT_PATH)
75
- if project_path:
76
- project_vars = projects.build_system_prompt_vars(project_path)
77
  result += "\n\n" + agent.read_prompt(
78
  "agent.system.projects.active.md", **project_vars
79
  )
 
58
  def get_secrets_prompt(agent: Agent):
59
  try:
60
  # Use lazy import to avoid circular dependencies
61
+ from python.helpers.secrets import get_secrets_manager
62
 
63
+ secrets_manager = get_secrets_manager(agent.context)
64
  secrets = secrets_manager.get_secrets_for_prompt()
65
  vars = get_settings()["variables"]
66
  return agent.read_prompt("agent.system.secrets.md", secrets=secrets, vars=vars)
 
71
 
72
  def get_project_prompt(agent: Agent):
73
  result = agent.read_prompt("agent.system.projects.main.md")
74
+ project_name = agent.context.get_data(projects.CONTEXT_DATA_KEY_PROJECT)
75
+ if project_name:
76
+ project_vars = projects.build_system_prompt_vars(project_name)
77
  result += "\n\n" + agent.read_prompt(
78
  "agent.system.projects.active.md", **project_vars
79
  )
python/extensions/system_prompt/_20_behaviour_prompt.py CHANGED
@@ -11,7 +11,7 @@ class BehaviourPrompt(Extension):
11
  system_prompt.insert(0, prompt) #.append(prompt)
12
 
13
  def get_custom_rules_file(agent: Agent):
14
- return memory.get_memory_subdir_abs(agent) + f"/behaviour.md"
15
 
16
  def read_rules(agent: Agent):
17
  rules_file = get_custom_rules_file(agent)
 
11
  system_prompt.insert(0, prompt) #.append(prompt)
12
 
13
  def get_custom_rules_file(agent: Agent):
14
+ return files.get_abs_path(memory.get_memory_subdir_abs(agent), "behaviour.md")
15
 
16
  def read_rules(agent: Agent):
17
  rules_file = get_custom_rules_file(agent)
python/extensions/tool_execute_after/_10_mask_secrets.py CHANGED
@@ -1,5 +1,5 @@
1
  from python.helpers.extension import Extension
2
- from python.helpers.secrets import SecretsManager
3
  from python.helpers.tool import Response
4
 
5
 
@@ -8,5 +8,5 @@ class MaskToolSecrets(Extension):
8
  async def execute(self, response: Response | None = None, **kwargs):
9
  if not response:
10
  return
11
- secrets_mgr = SecretsManager.get_instance()
12
  response.message = secrets_mgr.mask_values(response.message)
 
1
  from python.helpers.extension import Extension
2
+ from python.helpers.secrets import get_secrets_manager
3
  from python.helpers.tool import Response
4
 
5
 
 
8
  async def execute(self, response: Response | None = None, **kwargs):
9
  if not response:
10
  return
11
+ secrets_mgr = get_secrets_manager(self.agent.context)
12
  response.message = secrets_mgr.mask_values(response.message)
python/extensions/tool_execute_before/_10_unmask_secrets.py CHANGED
@@ -1,5 +1,5 @@
1
  from python.helpers.extension import Extension
2
- from python.helpers.secrets import SecretsManager
3
 
4
 
5
  class UnmaskToolSecrets(Extension):
@@ -10,7 +10,7 @@ class UnmaskToolSecrets(Extension):
10
  if not tool_args:
11
  return
12
 
13
- secrets_mgr = SecretsManager.get_instance()
14
 
15
  # Unmask placeholders in args for actual tool execution
16
  for k, v in tool_args.items():
 
1
  from python.helpers.extension import Extension
2
+ from python.helpers.secrets import get_secrets_manager
3
 
4
 
5
  class UnmaskToolSecrets(Extension):
 
10
  if not tool_args:
11
  return
12
 
13
+ secrets_mgr = get_secrets_manager(self.agent.context)
14
 
15
  # Unmask placeholders in args for actual tool execution
16
  for k, v in tool_args.items():
python/extensions/util_model_call_before/_10_mask_secrets.py CHANGED
@@ -1,5 +1,5 @@
1
  from python.helpers.extension import Extension
2
- from python.helpers.secrets import SecretsManager
3
 
4
 
5
  class MaskToolSecrets(Extension):
@@ -8,7 +8,7 @@ class MaskToolSecrets(Extension):
8
  # model call data
9
  call_data:dict = kwargs.get("call_data", {})
10
 
11
- secrets_mgr = SecretsManager.get_instance()
12
 
13
  # mask system and user message
14
  if system:=call_data.get("system"):
 
1
  from python.helpers.extension import Extension
2
+ from python.helpers.secrets import get_secrets_manager
3
 
4
 
5
  class MaskToolSecrets(Extension):
 
8
  # model call data
9
  call_data:dict = kwargs.get("call_data", {})
10
 
11
+ secrets_mgr = get_secrets_manager(self.agent.context)
12
 
13
  # mask system and user message
14
  if system:=call_data.get("system"):
python/helpers/api.py CHANGED
@@ -80,14 +80,18 @@ class ApiHandler:
80
  return Response(response=error, status=500, mimetype="text/plain")
81
 
82
  # get context to run agent zero in
83
- def get_context(self, ctxid: str):
84
  with self.thread_lock:
85
  if not ctxid:
86
  first = AgentContext.first()
87
  if first:
 
88
  return first
89
- return AgentContext(config=initialize_agent())
90
- got = AgentContext.get(ctxid)
 
91
  if got:
92
  return got
93
- return AgentContext(config=initialize_agent(), id=ctxid)
 
 
 
80
  return Response(response=error, status=500, mimetype="text/plain")
81
 
82
  # get context to run agent zero in
83
+ def use_context(self, ctxid: str, create_if_not_exists: bool = True):
84
  with self.thread_lock:
85
  if not ctxid:
86
  first = AgentContext.first()
87
  if first:
88
+ AgentContext.use(first.id)
89
  return first
90
+ context = AgentContext(config=initialize_agent(), set_current=True)
91
+ return context
92
+ got = AgentContext.use(ctxid)
93
  if got:
94
  return got
95
+ if create_if_not_exists:
96
+ context = AgentContext(config=initialize_agent(), id=ctxid, set_current=True)
97
+ return context
python/helpers/context.py ADDED
@@ -0,0 +1,46 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
from contextvars import ContextVar
from typing import Any, TypeVar, cast, Optional, Dict

T = TypeVar("T")

# no mutable default — None is safe (a shared default dict would leak
# between unrelated contexts)
_context_data: ContextVar[Optional[Dict[str, Any]]] = ContextVar("_context_data", default=None)


def _ensure_context() -> Dict[str, Any]:
    """Make sure a context dict exists for the current context, and return it."""
    data = _context_data.get()
    if data is None:
        data = {}
        _context_data.set(data)
    return data


def set_context_data(key: str, value: Any):
    """Set context data for the current async/task context.

    The key is always stored, even when value is None — a missing key and a
    key explicitly set to None stay distinguishable through the ``default``
    argument of :func:`get_context_data`.
    """
    data = _ensure_context()
    # skip only when the key exists AND already holds an equal value;
    # a bare `data.get(key) == value` would wrongly no-op for value=None
    # on a missing key and the key would never be stored
    if key in data and data[key] == value:
        return
    data[key] = value
    _context_data.set(data)


def delete_context_data(key: str):
    """Delete a key from the current async/task context (no-op if absent)."""
    data = _ensure_context()
    if key in data:
        del data[key]
        _context_data.set(data)


def get_context_data(key: Optional[str] = None, default: T = None) -> T:
    """Get a key from the current context, or the full dict if key is None."""
    data = _ensure_context()
    if key is None:
        return cast(T, data)
    return cast(T, data.get(key, default))


def clear_context_data():
    """Completely clear the context dict for the current context."""
    _context_data.set({})
python/helpers/files.py CHANGED
@@ -14,6 +14,7 @@ import importlib
14
  import importlib.util
15
  import inspect
16
  import glob
 
17
 
18
 
19
  class VariablesPlugin(ABC):
@@ -22,7 +23,9 @@ class VariablesPlugin(ABC):
22
  pass
23
 
24
 
25
- def load_plugin_variables(file: str, backup_dirs: list[str] | None = None) -> dict[str, Any]:
 
 
26
  if not file.endswith(".md"):
27
  return {}
28
 
@@ -38,11 +41,14 @@ def load_plugin_variables(file: str, backup_dirs: list[str] | None = None) -> di
38
  plugin_file = None
39
 
40
  if plugin_file and exists(plugin_file):
41
-
42
  from python.helpers import extract_tools
43
- classes = extract_tools.load_classes_from_file(plugin_file, VariablesPlugin, one_per_file=False)
 
 
 
44
  for cls in classes:
45
- return cls().get_variables(file, backup_dirs) # type: ignore < abstract class here is ok, it is always a subclass
46
 
47
  # load python code and extract variables variables from it
48
  # module = None
@@ -70,10 +76,13 @@ def load_plugin_variables(file: str, backup_dirs: list[str] | None = None) -> di
70
  # return cls[1]().get_variables() # type: ignore
71
  return {}
72
 
 
73
  from python.helpers.strings import sanitize_string
74
 
75
 
76
- def parse_file(_filename: str, _directories: list[str] | None = None, _encoding="utf-8", **kwargs):
 
 
77
  if _directories is None:
78
  _directories = []
79
 
@@ -84,7 +93,7 @@ def parse_file(_filename: str, _directories: list[str] | None = None, _encoding=
84
  with open(absolute_path, "r", encoding=_encoding) as f:
85
  # content = remove_code_fences(f.read())
86
  content = f.read()
87
-
88
  is_json = is_full_json_template(content)
89
  content = remove_code_fences(content)
90
  variables = load_plugin_variables(absolute_path, _directories) or {} # type: ignore
@@ -99,12 +108,16 @@ def parse_file(_filename: str, _directories: list[str] | None = None, _encoding=
99
  # Process include statements
100
  content = process_includes(
101
  # here we use kwargs, the plugin variables are not inherited
102
- content, _directories, **kwargs
 
 
103
  )
104
  return content
105
 
106
 
107
- def read_prompt_file(_file: str, _directories: list[str] | None = None, _encoding="utf-8", **kwargs):
 
 
108
  if _directories is None:
109
  _directories = []
110
 
@@ -131,13 +144,15 @@ def read_prompt_file(_file: str, _directories: list[str] | None = None, _encodin
131
  # Process include statements
132
  content = process_includes(
133
  # here we use kwargs, the plugin variables are not inherited
134
- content, _directories, **kwargs
 
 
135
  )
136
 
137
  return content
138
 
139
 
140
- def read_file(relative_path:str, encoding="utf-8"):
141
  # Try to get the absolute path for the file from the original directory or backup directories
142
  absolute_path = get_abs_path(relative_path)
143
 
@@ -146,7 +161,7 @@ def read_file(relative_path:str, encoding="utf-8"):
146
  return f.read()
147
 
148
 
149
- def read_file_bin(relative_path:str):
150
  # Try to get the absolute path for the file from the original directory or backup directories
151
  absolute_path = get_abs_path(relative_path)
152
 
@@ -248,6 +263,7 @@ def find_file_in_dirs(_filename: str, _directories: list[str]):
248
  f"File '{_filename}' not found in any of the provided directories."
249
  )
250
 
 
251
  def get_unique_filenames_in_dirs(dir_paths: list[str], pattern: str = "*"):
252
  # returns absolute paths for unique filenames, priority by order in dir_paths
253
  seen = set()
@@ -263,6 +279,7 @@ def get_unique_filenames_in_dirs(dir_paths: list[str], pattern: str = "*"):
263
  result.sort(key=lambda path: os.path.basename(path))
264
  return result
265
 
 
266
  def remove_code_fences(text):
267
  # Pattern to match code fences with optional language specifier
268
  pattern = r"(```|~~~)(.*?\n)(.*?)(\1)"
@@ -334,6 +351,7 @@ def delete_dir(relative_path: str):
334
  # suppress all errors - we're ensuring no errors propagate
335
  pass
336
 
 
337
  def move_dir(old_path: str, new_path: str):
338
  # rename/move the directory from old_path to new_path (both relative)
339
  abs_old = get_abs_path(old_path)
@@ -345,6 +363,7 @@ def move_dir(old_path: str, new_path: str):
345
  except Exception:
346
  pass # suppress all errors, keep behavior consistent
347
 
 
348
  # move dir safely, remove with number if needed
349
  def move_dir_safe(src, dst, rename_format="{name}_{number}"):
350
  base_dst = dst
@@ -355,6 +374,7 @@ def move_dir_safe(src, dst, rename_format="{name}_{number}"):
355
  move_dir(src, dst)
356
  return dst
357
 
 
358
  # create dir safely, add number if needed
359
  def create_dir_safe(dst, rename_format="{name}_{number}"):
360
  base_dst = dst
@@ -365,10 +385,12 @@ def create_dir_safe(dst, rename_format="{name}_{number}"):
365
  create_dir(dst)
366
  return dst
367
 
 
368
  def create_dir(relative_path: str):
369
  abs_path = get_abs_path(relative_path)
370
  os.makedirs(abs_path, exist_ok=True)
371
 
 
372
  def list_files(relative_path: str, filter: str = "*"):
373
  abs_path = get_abs_path(relative_path)
374
  if not os.path.exists(abs_path):
@@ -385,18 +407,22 @@ def get_abs_path(*relative_paths):
385
  "Convert relative paths to absolute paths based on the base directory."
386
  return os.path.join(get_base_dir(), *relative_paths)
387
 
388
- def deabsolute_path(path:str):
 
389
  "Convert absolute paths to relative paths based on the base directory."
390
  return os.path.relpath(path, get_base_dir())
391
 
392
- def fix_dev_path(path:str):
 
393
  "On dev environment, convert /a0/... paths to local absolute paths"
394
  from python.helpers.runtime import is_development
 
395
  if is_development():
396
  if path.startswith("/a0/"):
397
  path = path.replace("/a0/", "")
398
  return get_abs_path(path)
399
 
 
400
  def exists(*relative_paths):
401
  path = get_abs_path(*relative_paths)
402
  return os.path.exists(path)
@@ -470,4 +496,29 @@ def move_file(relative_path: str, new_path: str):
470
 
471
  def safe_file_name(filename: str) -> str:
472
  # Replace any character that's not alphanumeric, dash, underscore, or dot with underscore
473
- return re.sub(r'[^a-zA-Z0-9-._]', '_', filename)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
14
  import importlib.util
15
  import inspect
16
  import glob
17
+ import mimetypes
18
 
19
 
20
  class VariablesPlugin(ABC):
 
23
  pass
24
 
25
 
26
+ def load_plugin_variables(
27
+ file: str, backup_dirs: list[str] | None = None
28
+ ) -> dict[str, Any]:
29
  if not file.endswith(".md"):
30
  return {}
31
 
 
41
  plugin_file = None
42
 
43
  if plugin_file and exists(plugin_file):
44
+
45
  from python.helpers import extract_tools
46
+
47
+ classes = extract_tools.load_classes_from_file(
48
+ plugin_file, VariablesPlugin, one_per_file=False
49
+ )
50
  for cls in classes:
51
+ return cls().get_variables(file, backup_dirs) # type: ignore < abstract class here is ok, it is always a subclass
52
 
53
  # load python code and extract variables variables from it
54
  # module = None
 
76
  # return cls[1]().get_variables() # type: ignore
77
  return {}
78
 
79
+
80
  from python.helpers.strings import sanitize_string
81
 
82
 
83
+ def parse_file(
84
+ _filename: str, _directories: list[str] | None = None, _encoding="utf-8", **kwargs
85
+ ):
86
  if _directories is None:
87
  _directories = []
88
 
 
93
  with open(absolute_path, "r", encoding=_encoding) as f:
94
  # content = remove_code_fences(f.read())
95
  content = f.read()
96
+
97
  is_json = is_full_json_template(content)
98
  content = remove_code_fences(content)
99
  variables = load_plugin_variables(absolute_path, _directories) or {} # type: ignore
 
108
  # Process include statements
109
  content = process_includes(
110
  # here we use kwargs, the plugin variables are not inherited
111
+ content,
112
+ _directories,
113
+ **kwargs,
114
  )
115
  return content
116
 
117
 
118
+ def read_prompt_file(
119
+ _file: str, _directories: list[str] | None = None, _encoding="utf-8", **kwargs
120
+ ):
121
  if _directories is None:
122
  _directories = []
123
 
 
144
  # Process include statements
145
  content = process_includes(
146
  # here we use kwargs, the plugin variables are not inherited
147
+ content,
148
+ _directories,
149
+ **kwargs,
150
  )
151
 
152
  return content
153
 
154
 
155
+ def read_file(relative_path: str, encoding="utf-8"):
156
  # Try to get the absolute path for the file from the original directory or backup directories
157
  absolute_path = get_abs_path(relative_path)
158
 
 
161
  return f.read()
162
 
163
 
164
+ def read_file_bin(relative_path: str):
165
  # Try to get the absolute path for the file from the original directory or backup directories
166
  absolute_path = get_abs_path(relative_path)
167
 
 
263
  f"File '{_filename}' not found in any of the provided directories."
264
  )
265
 
266
+
267
  def get_unique_filenames_in_dirs(dir_paths: list[str], pattern: str = "*"):
268
  # returns absolute paths for unique filenames, priority by order in dir_paths
269
  seen = set()
 
279
  result.sort(key=lambda path: os.path.basename(path))
280
  return result
281
 
282
+
283
  def remove_code_fences(text):
284
  # Pattern to match code fences with optional language specifier
285
  pattern = r"(```|~~~)(.*?\n)(.*?)(\1)"
 
351
  # suppress all errors - we're ensuring no errors propagate
352
  pass
353
 
354
+
355
  def move_dir(old_path: str, new_path: str):
356
  # rename/move the directory from old_path to new_path (both relative)
357
  abs_old = get_abs_path(old_path)
 
363
  except Exception:
364
  pass # suppress all errors, keep behavior consistent
365
 
366
+
367
  # move dir safely, remove with number if needed
368
  def move_dir_safe(src, dst, rename_format="{name}_{number}"):
369
  base_dst = dst
 
374
  move_dir(src, dst)
375
  return dst
376
 
377
+
378
  # create dir safely, add number if needed
379
  def create_dir_safe(dst, rename_format="{name}_{number}"):
380
  base_dst = dst
 
385
  create_dir(dst)
386
  return dst
387
 
388
+
389
  def create_dir(relative_path: str):
390
  abs_path = get_abs_path(relative_path)
391
  os.makedirs(abs_path, exist_ok=True)
392
 
393
+
394
  def list_files(relative_path: str, filter: str = "*"):
395
  abs_path = get_abs_path(relative_path)
396
  if not os.path.exists(abs_path):
 
407
  "Convert relative paths to absolute paths based on the base directory."
408
  return os.path.join(get_base_dir(), *relative_paths)
409
 
410
+
411
+ def deabsolute_path(path: str):
412
  "Convert absolute paths to relative paths based on the base directory."
413
  return os.path.relpath(path, get_base_dir())
414
 
415
+
416
+ def fix_dev_path(path: str):
417
  "On dev environment, convert /a0/... paths to local absolute paths"
418
  from python.helpers.runtime import is_development
419
+
420
  if is_development():
421
  if path.startswith("/a0/"):
422
  path = path.replace("/a0/", "")
423
  return get_abs_path(path)
424
 
425
+
426
  def exists(*relative_paths):
427
  path = get_abs_path(*relative_paths)
428
  return os.path.exists(path)
 
496
 
497
  def safe_file_name(filename: str) -> str:
498
  # Replace any character that's not alphanumeric, dash, underscore, or dot with underscore
499
+ return re.sub(r"[^a-zA-Z0-9-._]", "_", filename)
500
+
501
+
502
def read_text_files_in_dir(
    dir_path: str, max_size: int = 1024 * 1024
) -> dict[str, str]:
    """Read all text files directly inside dir_path (non-recursive).

    Returns a mapping of base file name -> file content. Entries are skipped
    when they are not regular files, exceed max_size bytes, or have a
    guessed MIME type that is not text/*. Files whose MIME type cannot be
    guessed (mime is None) are allowed through and only dropped if reading
    or decoding fails.
    """
    abs_path = get_abs_path(dir_path)
    if not os.path.exists(abs_path):
        return {}
    result: dict[str, str] = {}
    for file_path in [os.path.join(abs_path, f) for f in os.listdir(abs_path)]:
        try:
            if not os.path.isfile(file_path):
                continue
            if os.path.getsize(file_path) > max_size:
                continue
            mime, _ = mimetypes.guess_type(file_path)
            if mime is not None and not mime.startswith("text"):
                continue
            # best-effort read; binary/undecodable files raise and are skipped below
            content = read_file(file_path)
            result[os.path.basename(file_path)] = content
        except Exception:
            # skip unreadable entries silently; this helper is best-effort
            continue
    return result
python/helpers/log.py CHANGED
@@ -1,6 +1,6 @@
1
  from dataclasses import dataclass, field
2
  import json
3
- from typing import Any, Literal, Optional, Dict, TypeVar
4
 
5
  T = TypeVar("T")
6
  import uuid
@@ -8,6 +8,11 @@ from collections import OrderedDict # Import OrderedDict
8
  from python.helpers.strings import truncate_text_by_ratio
9
  import copy
10
  from typing import TypeVar
 
 
 
 
 
11
 
12
  T = TypeVar("T")
13
 
@@ -107,24 +112,7 @@ def _truncate_content(text: str | None) -> str:
107
  return truncated
108
 
109
 
110
- def _mask_recursive(obj: T) -> T:
111
- """Recursively mask secrets in nested objects."""
112
- try:
113
- from python.helpers.secrets import SecretsManager
114
-
115
- secrets_mgr = SecretsManager.get_instance()
116
 
117
- if isinstance(obj, str):
118
- return secrets_mgr.mask_values(obj)
119
- elif isinstance(obj, dict):
120
- return {k: _mask_recursive(v) for k, v in obj.items()} # type: ignore
121
- elif isinstance(obj, list):
122
- return [_mask_recursive(item) for item in obj] # type: ignore
123
- else:
124
- return obj
125
- except Exception as _e:
126
- # If masking fails, return original object
127
- return obj
128
 
129
 
130
  @dataclass
@@ -195,6 +183,7 @@ class LogItem:
195
  class Log:
196
 
197
  def __init__(self):
 
198
  self.guid: str = str(uuid.uuid4())
199
  self.updates: list[int] = []
200
  self.logs: list[LogItem] = []
@@ -250,23 +239,23 @@ class Log:
250
 
251
  # adjust all content before processing
252
  if heading is not None:
253
- heading = _mask_recursive(heading)
254
  heading = _truncate_heading(heading)
255
  item.heading = heading
256
  if content is not None:
257
- content = _mask_recursive(content)
258
  content = _truncate_content(content)
259
  item.content = content
260
  if kvps is not None:
261
  kvps = OrderedDict(copy.deepcopy(kvps))
262
- kvps = _mask_recursive(kvps)
263
  kvps = _truncate_value(kvps)
264
  item.kvps = kvps
265
  elif item.kvps is None:
266
  item.kvps = OrderedDict()
267
  if kwargs:
268
  kwargs = copy.deepcopy(kwargs)
269
- kwargs = _mask_recursive(kwargs)
270
  item.kvps.update(kwargs)
271
 
272
  if type is not None:
@@ -285,7 +274,7 @@ class Log:
285
  self._update_progress_from_item(item)
286
 
287
  def set_progress(self, progress: str, no: int = 0, active: bool = True):
288
- progress = _mask_recursive(progress)
289
  progress = _truncate_progress(progress)
290
  self.progress = progress
291
  if not no:
@@ -324,3 +313,28 @@ class Log:
324
  item.heading,
325
  (item.no if item.update_progress == "persistent" else -1),
326
  )
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
  from dataclasses import dataclass, field
2
  import json
3
+ from typing import Any, Literal, Optional, Dict, TypeVar, TYPE_CHECKING
4
 
5
  T = TypeVar("T")
6
  import uuid
 
8
  from python.helpers.strings import truncate_text_by_ratio
9
  import copy
10
  from typing import TypeVar
11
+ from python.helpers.secrets import get_secrets_manager
12
+
13
+
14
+ if TYPE_CHECKING:
15
+ from agent import AgentContext
16
 
17
  T = TypeVar("T")
18
 
 
112
  return truncated
113
 
114
 
 
 
 
 
 
 
115
 
 
 
 
 
 
 
 
 
 
 
 
116
 
117
 
118
  @dataclass
 
183
  class Log:
184
 
185
  def __init__(self):
186
+ self.context: "AgentContext|None" = None # set from outside
187
  self.guid: str = str(uuid.uuid4())
188
  self.updates: list[int] = []
189
  self.logs: list[LogItem] = []
 
239
 
240
  # adjust all content before processing
241
  if heading is not None:
242
+ heading = self._mask_recursive(heading)
243
  heading = _truncate_heading(heading)
244
  item.heading = heading
245
  if content is not None:
246
+ content = self._mask_recursive(content)
247
  content = _truncate_content(content)
248
  item.content = content
249
  if kvps is not None:
250
  kvps = OrderedDict(copy.deepcopy(kvps))
251
+ kvps = self._mask_recursive(kvps)
252
  kvps = _truncate_value(kvps)
253
  item.kvps = kvps
254
  elif item.kvps is None:
255
  item.kvps = OrderedDict()
256
  if kwargs:
257
  kwargs = copy.deepcopy(kwargs)
258
+ kwargs = self._mask_recursive(kwargs)
259
  item.kvps.update(kwargs)
260
 
261
  if type is not None:
 
274
  self._update_progress_from_item(item)
275
 
276
  def set_progress(self, progress: str, no: int = 0, active: bool = True):
277
+ progress = self._mask_recursive(progress)
278
  progress = _truncate_progress(progress)
279
  self.progress = progress
280
  if not no:
 
313
  item.heading,
314
  (item.no if item.update_progress == "persistent" else -1),
315
  )
316
+
317
+ def _mask_recursive(self, obj: T) -> T:
318
+ """Recursively mask secrets in nested objects."""
319
+ try:
320
+ from agent import AgentContext
321
+ secrets_mgr = get_secrets_manager(self.context or AgentContext.current())
322
+
323
+ # debug helper to identify context mismatch
324
+ self_id = self.context.id if self.context else None
325
+ current_ctx = AgentContext.current()
326
+ current_id = current_ctx.id if current_ctx else None
327
+ if self_id != current_id:
328
+ print(f"Context ID mismatch: {self_id} != {current_id}")
329
+
330
+ if isinstance(obj, str):
331
+ return secrets_mgr.mask_values(obj)
332
+ elif isinstance(obj, dict):
333
+ return {k: self._mask_recursive(v) for k, v in obj.items()} # type: ignore
334
+ elif isinstance(obj, list):
335
+ return [self._mask_recursive(item) for item in obj] # type: ignore
336
+ else:
337
+ return obj
338
+ except Exception as _e:
339
+ # If masking fails, return original object
340
+ return obj
python/helpers/memory.py CHANGED
@@ -28,7 +28,7 @@ from langchain_core.documents import Document
28
  from python.helpers import knowledge_import
29
  from python.helpers.log import Log, LogItem
30
  from enum import Enum
31
- from agent import Agent
32
  import models
33
  import logging
34
  from simpleeval import simple_eval
@@ -63,7 +63,7 @@ class Memory:
63
 
64
  @staticmethod
65
  async def get(agent: Agent):
66
- memory_subdir = agent.config.memory_subdir or "default"
67
  if Memory.index.get(memory_subdir) is None:
68
  log_item = agent.context.log.log(
69
  type="util",
@@ -136,7 +136,7 @@ class Memory:
136
  em_dir = files.get_abs_path(
137
  "memory/embeddings"
138
  ) # just caching, no need to parameterize
139
- db_dir = Memory._abs_db_dir(memory_subdir)
140
 
141
  # make sure embeddings and database directories exist
142
  os.makedirs(db_dir, exist_ok=True)
@@ -249,7 +249,7 @@ class Memory:
249
  log_item.update(heading="Preloading knowledge...")
250
 
251
  # db abs path
252
- db_dir = Memory._abs_db_dir(memory_subdir)
253
 
254
  # Load the index file if it exists
255
  index_path = files.get_abs_path(db_dir, "knowledge_import.json")
@@ -418,7 +418,7 @@ class Memory:
418
 
419
  @staticmethod
420
  def _save_db_file(db: MyFaiss, memory_subdir: str):
421
- abs_dir = Memory._abs_db_dir(memory_subdir)
422
  db.save_local(folder_path=abs_dir)
423
 
424
  @staticmethod
@@ -446,10 +446,6 @@ class Memory:
446
  ) # float precision can cause values like 1.0000000596046448
447
  return res
448
 
449
- @staticmethod
450
- def _abs_db_dir(memory_subdir: str) -> str:
451
- return files.get_abs_path("memory", memory_subdir)
452
-
453
  @staticmethod
454
  def format_docs_plain(docs: list[Document]) -> list[str]:
455
  result = []
@@ -466,10 +462,6 @@ class Memory:
466
  return datetime.now().strftime("%Y-%m-%d %H:%M:%S")
467
 
468
 
469
- def get_memory_subdir_abs(agent: Agent) -> str:
470
- return files.get_abs_path("memory", agent.config.memory_subdir or "default")
471
-
472
-
473
  def get_custom_knowledge_subdir_abs(agent: Agent) -> str:
474
  for dir in agent.config.knowledge_subdirs:
475
  if dir != "default":
@@ -480,3 +472,56 @@ def get_custom_knowledge_subdir_abs(agent: Agent) -> str:
480
  def reload():
481
  # clear the memory index, this will force all DBs to reload
482
  Memory.index = {}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
28
  from python.helpers import knowledge_import
29
  from python.helpers.log import Log, LogItem
30
  from enum import Enum
31
+ from agent import Agent, AgentContext
32
  import models
33
  import logging
34
  from simpleeval import simple_eval
 
63
 
64
  @staticmethod
65
  async def get(agent: Agent):
66
+ memory_subdir = get_agent_memory_subdir(agent)
67
  if Memory.index.get(memory_subdir) is None:
68
  log_item = agent.context.log.log(
69
  type="util",
 
136
  em_dir = files.get_abs_path(
137
  "memory/embeddings"
138
  ) # just caching, no need to parameterize
139
+ db_dir = abs_db_dir(memory_subdir)
140
 
141
  # make sure embeddings and database directories exist
142
  os.makedirs(db_dir, exist_ok=True)
 
249
  log_item.update(heading="Preloading knowledge...")
250
 
251
  # db abs path
252
+ db_dir = abs_db_dir(memory_subdir)
253
 
254
  # Load the index file if it exists
255
  index_path = files.get_abs_path(db_dir, "knowledge_import.json")
 
418
 
419
  @staticmethod
420
  def _save_db_file(db: MyFaiss, memory_subdir: str):
421
+ abs_dir = abs_db_dir(memory_subdir)
422
  db.save_local(folder_path=abs_dir)
423
 
424
  @staticmethod
 
446
  ) # float precision can cause values like 1.0000000596046448
447
  return res
448
 
 
 
 
 
449
  @staticmethod
450
  def format_docs_plain(docs: list[Document]) -> list[str]:
451
  result = []
 
462
  return datetime.now().strftime("%Y-%m-%d %H:%M:%S")
463
 
464
 
 
 
 
 
465
  def get_custom_knowledge_subdir_abs(agent: Agent) -> str:
466
  for dir in agent.config.knowledge_subdirs:
467
  if dir != "default":
 
472
  def reload():
473
  # clear the memory index, this will force all DBs to reload
474
  Memory.index = {}
475
+
476
+
477
+ def abs_db_dir(memory_subdir: str) -> str:
478
+ # patch for projects, this way we don't need to re-work the structure of memory subdirs
479
+ if memory_subdir.startswith("projects/"):
480
+ from python.helpers.projects import get_project_meta_folder
481
+
482
+ return files.get_abs_path(get_project_meta_folder(memory_subdir[9:]), "memory")
483
+ # standard subdirs
484
+ return files.get_abs_path("memory", memory_subdir)
485
+
486
+
487
+ def get_memory_subdir_abs(agent: Agent) -> str:
488
+ subdir = get_agent_memory_subdir(agent)
489
+ return abs_db_dir(subdir)
490
+
491
+
492
+ def get_agent_memory_subdir(agent: Agent) -> str:
493
+ # if project is active, use project memory subdir
494
+ return get_context_memory_subdir(agent.context)
495
+
496
+
497
+ def get_context_memory_subdir(context: AgentContext) -> str:
498
+ # if project is active, use project memory subdir
499
+ from python.helpers.projects import get_context_memory_subdir as get_project_memory_subdir
500
+
501
+ memory_subdir = get_project_memory_subdir(context)
502
+ if memory_subdir:
503
+ return memory_subdir
504
+
505
+ # no project, regular memory subdir
506
+ return context.config.memory_subdir or "default"
507
+
508
+ def get_existing_memory_subdirs() -> list[str]:
509
+ try:
510
+ from python.helpers.projects import get_project_meta_folder, get_projects_parent_folder
511
+ # Get subdirectories from memory folder
512
+ subdirs = files.get_subdirectories("memory", exclude="embeddings")
513
+
514
+ project_subdirs = files.get_subdirectories(get_projects_parent_folder())
515
+ for project_subdir in project_subdirs:
516
+ if files.exists(get_project_meta_folder(project_subdir), "memory", "index.faiss"):
517
+ subdirs.append(f"projects/{project_subdir}")
518
+
519
+ # Ensure 'default' is always available
520
+ if "default" not in subdirs:
521
+ subdirs.insert(0, "default")
522
+
523
+ return subdirs
524
+ except Exception as e:
525
+ PrintStyle.error(f"Failed to get memory subdirectories: {str(e)}")
526
+ return ["default"]
527
+
python/helpers/print_style.py CHANGED
@@ -95,9 +95,10 @@ class PrintStyle:
95
 
96
  # Automatically mask secrets in all print output
97
  try:
98
- from python.helpers.secrets import SecretsManager
99
- secrets_mgr = SecretsManager.get_instance()
100
- text = secrets_mgr.mask_values(text)
 
101
  except Exception:
102
  # If masking fails, proceed without masking to avoid breaking functionality
103
  pass
 
95
 
96
  # Automatically mask secrets in all print output
97
  try:
98
+ if not hasattr(self, "secrets_mgr"):
99
+ from python.helpers.secrets import get_secrets_manager
100
+ self.secrets_mgr = get_secrets_manager()
101
+ text = self.secrets_mgr.mask_values(text)
102
  except Exception:
103
  # If masking fails, proceed without masking to avoid breaking functionality
104
  pass
python/helpers/projects.py CHANGED
@@ -1,78 +1,73 @@
1
  import os
2
- from typing import TypedDict
3
 
4
  from python.helpers import files, dirty_json, persist_chat
5
  from python.helpers.print_style import PrintStyle
6
 
 
 
 
 
7
  PROJECTS_PARENT_DIR = "usr/projects"
8
- PROJECTS_ARCHIVE_DIR = "usr/projects-archived"
9
  PROJECT_META_DIR = ".a0proj"
10
  PROJECT_INSTRUCTIONS_DIR = "instructions"
11
  PROJECT_HEADER_FILE = "project.json"
12
 
13
- CONTEXT_DATA_KEY_PROJECT_PATH = "project_path"
14
- CONTEXT_DATA_KEY_PROJECT_COLOR = "project_color"
15
- CONTEXT_DATA_KEY_PROJECT_NAME = "project_name"
16
 
17
 
18
  class BasicProjectData(TypedDict):
19
- title: str | None
20
- description: str | None
21
- instructions: str | None
22
- color: str | None
 
 
23
 
24
 
25
  class EditProjectData(BasicProjectData):
26
  name: str
27
- path: str
 
 
28
 
29
 
30
  def get_projects_parent_folder():
31
  return files.get_abs_path(PROJECTS_PARENT_DIR)
32
 
33
 
34
- def get_projects_archive_folder():
35
- return files.get_abs_path(PROJECTS_ARCHIVE_DIR)
36
-
37
-
38
  def get_project_folder(name: str):
39
  return files.get_abs_path(get_projects_parent_folder(), name)
40
 
 
 
41
 
42
- def get_archived_project_folder(name: str):
43
- return files.get_abs_path(get_projects_archive_folder(), name)
44
-
45
-
46
- def archive_project(name: str):
47
- return files.move_dir_safe(
48
- get_project_folder(name), get_archived_project_folder(name), rename_format="{name}_{number}"
49
- )
50
-
51
-
52
- def unarchive_project(name: str):
53
- return files.move_dir_safe(
54
- get_archived_project_folder(name), get_project_folder(name), rename_format="{name}_{number}"
55
- )
56
 
57
-
58
- def delete_project(path: str):
59
- files.delete_dir(path)
60
- deactivate_project_in_chats(path)
61
- return path
62
 
63
 
64
  def create_project(name: str, data: BasicProjectData):
65
- new_path = files.create_dir_safe(get_project_folder(name), rename_format="{name}_{number}")
 
 
 
 
 
66
  data = _normalizeBasicData(data)
67
- save_project_files(new_path, data)
68
- return new_path
69
 
70
 
71
- def load_project_header(path: str):
72
- header: dict = dirty_json.parse(
73
- files.read_file(files.get_abs_path(path, PROJECT_META_DIR, PROJECT_HEADER_FILE))
74
- ) # type: ignore
75
- header["path"] = path
 
76
  return header
77
 
78
 
@@ -82,61 +77,96 @@ def _normalizeBasicData(data: BasicProjectData):
82
  description=data.get("description", ""),
83
  instructions=data.get("instructions", ""),
84
  color=data.get("color", ""),
 
85
  )
86
 
 
 
 
 
 
 
 
 
 
 
 
 
87
 
88
- def update_project(path: str, data: BasicProjectData):
89
- current: BasicProjectData = load_edit_project_data(path) # type: ignore
90
- current.update(_normalizeBasicData(data))
91
- save_project_files(path, current)
92
- reactivate_project_in_chats(path)
93
- return path
94
-
95
-
96
- def load_edit_project_data(path: str) -> BasicProjectData:
97
- data = BasicProjectData(
98
- **dirty_json.parse(
99
- files.read_file(
100
- files.get_abs_path(path, PROJECT_META_DIR, PROJECT_HEADER_FILE)
101
- )
102
- ) # type: ignore
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
103
  )
104
- data = _normalizeBasicData(data)
105
- data = EditProjectData(**data, name=os.path.basename(path), path=path)
106
- return data # type: ignore
107
 
108
 
109
- def save_project_files(path: str, data: BasicProjectData):
110
  # save project header file
111
  header = dirty_json.stringify(data)
112
- files.write_file(
113
- files.get_abs_path(path, PROJECT_META_DIR, PROJECT_HEADER_FILE), header
114
  )
115
 
 
 
116
 
117
  def get_active_projects_list():
118
  return _get_projects_list(get_projects_parent_folder())
119
 
120
 
121
- def get_archived_projects_list():
122
- return _get_projects_list(get_projects_archive_folder())
123
-
124
-
125
  def _get_projects_list(parent_dir):
126
  projects = []
127
 
128
  # folders in project directory
129
  for name in os.listdir(parent_dir):
130
  try:
131
- path = os.path.join(parent_dir, name)
132
- if os.path.isdir(path):
133
-
134
- project_data = load_edit_project_data(path)
135
-
136
  projects.append(
137
  {
138
  "name": name,
139
- "path": path,
140
  "title": project_data.get("title", ""),
141
  "description": project_data.get("description", ""),
142
  "color": project_data.get("color", ""),
@@ -146,24 +176,24 @@ def _get_projects_list(parent_dir):
146
  PrintStyle.error(f"Error loading project {name}: {str(e)}")
147
 
148
  # sort projects by name
149
-
150
  projects.sort(key=lambda x: x["name"])
151
  return projects
152
 
153
 
154
- def activate_project(context_id: str, path: str):
155
  from agent import AgentContext
156
 
157
- data = load_edit_project_data(path)
158
  context = AgentContext.get(context_id)
159
  if context is None:
160
  raise Exception("Context not found")
161
- name = str(data.get("title", data.get("name", data.get("path", ""))))
162
- name = name[:22] + "..." if len(name) > 25 else name
163
- context.set_data(CONTEXT_DATA_KEY_PROJECT_PATH, path)
164
- context.set_output_data(CONTEXT_DATA_KEY_PROJECT_PATH, path)
165
- context.set_output_data(CONTEXT_DATA_KEY_PROJECT_COLOR, data.get("color", ""))
166
- context.set_output_data(CONTEXT_DATA_KEY_PROJECT_NAME, name)
 
167
 
168
  # persist
169
  persist_chat.save_tmp_chat(context)
@@ -175,37 +205,87 @@ def deactivate_project(context_id: str):
175
  context = AgentContext.get(context_id)
176
  if context is None:
177
  raise Exception("Context not found")
178
- context.set_data(CONTEXT_DATA_KEY_PROJECT_PATH, None)
179
- context.set_output_data(CONTEXT_DATA_KEY_PROJECT_PATH, None)
180
- context.set_output_data(CONTEXT_DATA_KEY_PROJECT_COLOR, None)
181
- context.set_output_data(CONTEXT_DATA_KEY_PROJECT_NAME, None)
182
 
183
  # persist
184
  persist_chat.save_tmp_chat(context)
185
 
186
 
187
- def reactivate_project_in_chats(path: str):
188
  from agent import AgentContext
189
 
190
  for context in AgentContext.all():
191
- if context.get_data(CONTEXT_DATA_KEY_PROJECT_PATH) == path:
192
- activate_project(context.id, path)
193
  persist_chat.save_tmp_chat(context)
194
 
195
- def deactivate_project_in_chats(path: str):
 
196
  from agent import AgentContext
197
 
198
  for context in AgentContext.all():
199
- if context.get_data(CONTEXT_DATA_KEY_PROJECT_PATH) == path:
200
  deactivate_project(context.id)
201
  persist_chat.save_tmp_chat(context)
202
 
203
- def build_system_prompt_vars(project_path: str):
204
- project_data = load_edit_project_data(project_path)
 
 
 
 
 
 
 
 
 
205
  return {
206
- "project_path": project_path,
207
  "project_name": project_data.get("title", ""),
208
  "project_description": project_data.get("description", ""),
209
- "project_instructions": project_data.get("instructions", ""),
210
  }
211
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
  import os
2
+ from typing import Literal, TypedDict, TYPE_CHECKING
3
 
4
  from python.helpers import files, dirty_json, persist_chat
5
  from python.helpers.print_style import PrintStyle
6
 
7
+
8
+ if TYPE_CHECKING:
9
+ from agent import AgentContext
10
+
11
  PROJECTS_PARENT_DIR = "usr/projects"
 
12
  PROJECT_META_DIR = ".a0proj"
13
  PROJECT_INSTRUCTIONS_DIR = "instructions"
14
  PROJECT_HEADER_FILE = "project.json"
15
 
16
+ CONTEXT_DATA_KEY_PROJECT = "project"
 
 
17
 
18
 
19
  class BasicProjectData(TypedDict):
20
+ title: str
21
+ description: str
22
+ instructions: str
23
+ color: str
24
+ memory: Literal["own", "global"] # in the future we can add cutom and point to another existing folder
25
+
26
 
27
 
28
  class EditProjectData(BasicProjectData):
29
  name: str
30
+ instruction_files_count: int
31
+ variables: str
32
+ secrets: str
33
 
34
 
35
  def get_projects_parent_folder():
36
  return files.get_abs_path(PROJECTS_PARENT_DIR)
37
 
38
 
 
 
 
 
39
  def get_project_folder(name: str):
40
  return files.get_abs_path(get_projects_parent_folder(), name)
41
 
42
+ def get_project_meta_folder(name: str):
43
+ return files.get_abs_path(get_project_folder(name), PROJECT_META_DIR)
44
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
45
 
46
+ def delete_project(name: str):
47
+ abs_path = files.get_abs_path(PROJECTS_PARENT_DIR, name)
48
+ files.delete_dir(abs_path)
49
+ deactivate_project_in_chats(name)
50
+ return name
51
 
52
 
53
  def create_project(name: str, data: BasicProjectData):
54
+ abs_path = files.create_dir_safe(
55
+ files.get_abs_path(PROJECTS_PARENT_DIR, name), rename_format="{name}_{number}"
56
+ )
57
+ files.create_dir(
58
+ files.get_abs_path(abs_path, PROJECT_META_DIR, PROJECT_INSTRUCTIONS_DIR)
59
+ )
60
  data = _normalizeBasicData(data)
61
+ save_project_header(name, data)
62
+ return name
63
 
64
 
65
+ def load_project_header(name: str):
66
+ abs_path = files.get_abs_path(
67
+ PROJECTS_PARENT_DIR, name, PROJECT_META_DIR, PROJECT_HEADER_FILE
68
+ )
69
+ header: dict = dirty_json.parse(files.read_file(abs_path)) # type: ignore
70
+ header["name"] = name
71
  return header
72
 
73
 
 
77
  description=data.get("description", ""),
78
  instructions=data.get("instructions", ""),
79
  color=data.get("color", ""),
80
+ memory=data.get("memory", "own"),
81
  )
82
 
83
+ def _normalizeEditData(data: EditProjectData):
84
+ return EditProjectData(
85
+ name=data.get("name", ""),
86
+ title=data.get("title", ""),
87
+ description=data.get("description", ""),
88
+ instructions=data.get("instructions", ""),
89
+ variables=data.get("variables", ""),
90
+ color=data.get("color", ""),
91
+ instruction_files_count=data.get("instruction_files_count", 0),
92
+ secrets=data.get("secrets", ""),
93
+ memory=data.get("memory", "own"),
94
+ )
95
 
96
+ def _edit_data_to_basic_data(data: EditProjectData):
97
+ return _normalizeBasicData(data)
98
+
99
+ def _basic_data_to_edit_data(data: BasicProjectData):
100
+ return _normalizeEditData(data) # type: ignore
101
+
102
+
103
+ def update_project(name: str, data: EditProjectData):
104
+ # merge with current state
105
+ current = load_edit_project_data(name)
106
+ current.update(data)
107
+ current = _normalizeEditData(current)
108
+
109
+ # save header data
110
+ header = _edit_data_to_basic_data(current)
111
+ save_project_header(name, header)
112
+
113
+ # save secrets
114
+ save_project_variables(name, current["variables"])
115
+ save_project_secrets(name, current["secrets"])
116
+
117
+ reactivate_project_in_chats(name)
118
+ return name
119
+
120
+ def load_basic_project_data(name: str) -> BasicProjectData:
121
+ data = BasicProjectData(**load_project_header(name))
122
+ normalized = _normalizeBasicData(data)
123
+ return normalized
124
+
125
+
126
+ def load_edit_project_data(name: str) -> EditProjectData:
127
+ data = load_basic_project_data(name)
128
+ additional_instructions = get_additional_instructions_files(
129
+ name
130
+ ) # for additional info
131
+ variables = load_project_variables(name)
132
+ secrets = load_project_secrets_masked(name)
133
+ data = EditProjectData(
134
+ **data,
135
+ name=name,
136
+ instruction_files_count=len(additional_instructions),
137
+ variables=variables,
138
+ secrets=secrets,
139
  )
140
+ data = _normalizeEditData(data)
141
+ return data
 
142
 
143
 
144
+ def save_project_header(name: str, data: BasicProjectData):
145
  # save project header file
146
  header = dirty_json.stringify(data)
147
+ abs_path = files.get_abs_path(
148
+ PROJECTS_PARENT_DIR, name, PROJECT_META_DIR, PROJECT_HEADER_FILE
149
  )
150
 
151
+ files.write_file(abs_path, header)
152
+
153
 
154
  def get_active_projects_list():
155
  return _get_projects_list(get_projects_parent_folder())
156
 
157
 
 
 
 
 
158
  def _get_projects_list(parent_dir):
159
  projects = []
160
 
161
  # folders in project directory
162
  for name in os.listdir(parent_dir):
163
  try:
164
+ abs_path = os.path.join(parent_dir, name)
165
+ if os.path.isdir(abs_path):
166
+ project_data = load_basic_project_data(name)
 
 
167
  projects.append(
168
  {
169
  "name": name,
 
170
  "title": project_data.get("title", ""),
171
  "description": project_data.get("description", ""),
172
  "color": project_data.get("color", ""),
 
176
  PrintStyle.error(f"Error loading project {name}: {str(e)}")
177
 
178
  # sort projects by name
 
179
  projects.sort(key=lambda x: x["name"])
180
  return projects
181
 
182
 
183
+ def activate_project(context_id: str, name: str):
184
  from agent import AgentContext
185
 
186
+ data = load_edit_project_data(name)
187
  context = AgentContext.get(context_id)
188
  if context is None:
189
  raise Exception("Context not found")
190
+ display_name = str(data.get("title", name))
191
+ display_name = display_name[:22] + "..." if len(display_name) > 25 else display_name
192
+ context.set_data(CONTEXT_DATA_KEY_PROJECT, name)
193
+ context.set_output_data(
194
+ CONTEXT_DATA_KEY_PROJECT,
195
+ {"name": name, "title": display_name, "color": data.get("color", "")},
196
+ )
197
 
198
  # persist
199
  persist_chat.save_tmp_chat(context)
 
205
  context = AgentContext.get(context_id)
206
  if context is None:
207
  raise Exception("Context not found")
208
+ context.set_data(CONTEXT_DATA_KEY_PROJECT, None)
209
+ context.set_output_data(CONTEXT_DATA_KEY_PROJECT, None)
 
 
210
 
211
  # persist
212
  persist_chat.save_tmp_chat(context)
213
 
214
 
215
+ def reactivate_project_in_chats(name: str):
216
  from agent import AgentContext
217
 
218
  for context in AgentContext.all():
219
+ if context.get_data(CONTEXT_DATA_KEY_PROJECT) == name:
220
+ activate_project(context.id, name)
221
  persist_chat.save_tmp_chat(context)
222
 
223
+
224
+ def deactivate_project_in_chats(name: str):
225
  from agent import AgentContext
226
 
227
  for context in AgentContext.all():
228
+ if context.get_data(CONTEXT_DATA_KEY_PROJECT) == name:
229
  deactivate_project(context.id)
230
  persist_chat.save_tmp_chat(context)
231
 
232
+
233
+ def build_system_prompt_vars(name: str):
234
+ project_data = load_basic_project_data(name)
235
+ main_instructions = project_data.get("instructions", "") or ""
236
+ additional_instructions = get_additional_instructions_files(name)
237
+ complete_instructions = (
238
+ main_instructions
239
+ + "\n\n".join(
240
+ additional_instructions[k] for k in sorted(additional_instructions)
241
+ )
242
+ ).strip()
243
  return {
 
244
  "project_name": project_data.get("title", ""),
245
  "project_description": project_data.get("description", ""),
246
+ "project_instructions": complete_instructions or "",
247
  }
248
 
249
+
250
+ def get_additional_instructions_files(name: str):
251
+ instructions_folder = files.get_abs_path(
252
+ get_project_folder(name), PROJECT_META_DIR, PROJECT_INSTRUCTIONS_DIR
253
+ )
254
+ return files.read_text_files_in_dir(instructions_folder)
255
+
256
+ def get_context_project_name(context: "AgentContext") -> str | None:
257
+ return context.get_data(CONTEXT_DATA_KEY_PROJECT)
258
+
259
+ def load_project_variables(name: str):
260
+ try:
261
+ abs_path = files.get_abs_path(
262
+ get_project_meta_folder(name), "variables.env"
263
+ )
264
+ return files.read_file(abs_path)
265
+ except Exception:
266
+ return ""
267
+
268
+ def save_project_variables(name: str, variables: str):
269
+ abs_path = files.get_abs_path(
270
+ get_project_meta_folder(name), "variables.env"
271
+ )
272
+ files.write_file(abs_path, variables)
273
+
274
+ def load_project_secrets_masked(name:str, merge_with_global=False):
275
+ from python.helpers import secrets
276
+ mgr = secrets.get_project_secrets_manager(name, merge_with_global)
277
+ return mgr.get_masked_secrets()
278
+
279
+ def save_project_secrets(name: str, secrets: str):
280
+ from python.helpers.secrets import get_project_secrets_manager
281
+ secrets_manager = get_project_secrets_manager(name)
282
+ secrets_manager.save_secrets_with_merge(secrets)
283
+
284
+ def get_context_memory_subdir(context: "AgentContext") -> str | None:
285
+ # if a project is active and has memory isolation set, return the project memory subdir
286
+ project_name = get_context_project_name(context)
287
+ if project_name:
288
+ project_data = load_basic_project_data(project_name)
289
+ if project_data["memory"] == "own":
290
+ return "projects/" + project_name
291
+ return None # no memory override
python/helpers/secrets.py CHANGED
@@ -4,27 +4,32 @@ import time
4
  import os
5
  from io import StringIO
6
  from dataclasses import dataclass
7
- from typing import Dict, Optional, List, Literal, Set, Callable
8
  from dotenv.parser import parse_stream
9
  from python.helpers.errors import RepairableException
10
  from python.helpers import files
11
 
 
 
 
12
 
13
  # New alias-based placeholder format §§secret(KEY)
14
  ALIAS_PATTERN = r"§§secret\(([A-Za-z_][A-Za-z0-9_]*)\)"
 
 
15
 
16
  def alias_for_key(key: str, placeholder: str = "§§secret({key})") -> str:
17
  # Return alias string for given key in upper-case
18
  key = key.upper()
19
  return placeholder.format(key=key)
20
 
 
21
  @dataclass
22
  class EnvLine:
23
  raw: str
24
  type: Literal["pair", "comment", "blank", "other"]
25
  key: Optional[str] = None
26
  value: Optional[str] = None
27
- key_part: Optional[str] = None # original left side including whitespace up to '='
28
  inline_comment: Optional[str] = (
29
  None # preserves trailing inline comment including leading spaces and '#'
30
  )
@@ -118,44 +123,55 @@ class StreamingSecretsFilter:
118
 
119
 
120
  class SecretsManager:
121
- SECRETS_FILE = "tmp/secrets.env"
122
  PLACEHOLDER_PATTERN = ALIAS_PATTERN
123
  MASK_VALUE = "***"
124
 
125
- _instance: Optional["SecretsManager"] = None
126
  _secrets_cache: Optional[Dict[str, str]] = None
127
  _last_raw_text: Optional[str] = None
128
 
129
  @classmethod
130
- def get_instance(cls) -> "SecretsManager":
131
- if cls._instance is None:
132
- cls._instance = cls()
133
- return cls._instance
134
-
135
- def __init__(self):
 
 
 
136
  self._lock = threading.RLock()
137
- # instance-level override for secrets file
138
- self._secrets_file_rel = self.SECRETS_FILE
139
-
140
- def set_secrets_file(self, relative_path: str):
141
- """Override the relative secrets file location (useful for tests)."""
142
- with self._lock:
143
- self._secrets_file_rel = relative_path
144
- self.clear_cache()
145
 
146
  def read_secrets_raw(self) -> str:
147
  """Read raw secrets file content from local filesystem (same system)."""
148
- try:
149
- content = files.read_file(self._secrets_file_rel)
150
- self._last_raw_text = content
151
- return content
152
- except Exception:
153
- self._last_raw_text = ""
154
- return ""
 
 
 
 
 
 
 
 
155
 
156
  def _write_secrets_raw(self, content: str):
157
  """Write raw secrets file content to local filesystem."""
158
- files.write_file(self._secrets_file_rel, content)
 
 
 
 
159
 
160
  def load_secrets(self) -> Dict[str, str]:
161
  """Load secrets from file, return key-value dict"""
@@ -163,29 +179,27 @@ class SecretsManager:
163
  if self._secrets_cache is not None:
164
  return self._secrets_cache
165
 
166
- secrets: Dict[str, str] = {}
167
- try:
168
- content = self.read_secrets_raw()
169
- # keep raw snapshot for future save merge without reading again
170
- self._last_raw_text = content
171
- if content:
172
- secrets = self.parse_env_content(content)
173
- except Exception as e:
174
- # On unexpected failure, keep empty cache rather than crash
175
- secrets = {}
176
-
177
- self._secrets_cache = secrets
178
- return secrets
179
 
180
  def save_secrets(self, secrets_content: str):
181
  """Save secrets content to file and update cache"""
 
 
 
 
182
  with self._lock:
183
- # Ensure write to local filesystem (UTF-8)
184
  self._write_secrets_raw(secrets_content)
185
- # Update cache
186
- self._secrets_cache = self.parse_env_content(secrets_content)
187
- # Update raw snapshot
188
- self._last_raw_text = secrets_content
189
 
190
  def save_secrets_with_merge(self, submitted_content: str):
191
  """Merge submitted content with existing file preserving comments, order and supporting deletion.
@@ -193,13 +207,19 @@ class SecretsManager:
193
  - Keys present in existing but omitted from submitted are deleted.
194
  - New keys with non-masked values are appended at the end.
195
  """
 
 
 
 
196
  with self._lock:
197
  # Prefer in-memory snapshot to avoid disk reads during save
 
198
  if self._last_raw_text is not None:
199
  existing_text = self._last_raw_text
200
  else:
201
  try:
202
- existing_text = self.read_secrets_raw()
 
203
  except Exception as e:
204
  # If read fails and submitted contains masked values, abort to avoid losing values/comments
205
  if self.MASK_VALUE in submitted_content:
@@ -210,7 +230,8 @@ class SecretsManager:
210
  existing_text = ""
211
  merged_lines = self._merge_env(existing_text, submitted_content)
212
  merged_text = self._serialize_env_lines(merged_lines)
213
- self.save_secrets(merged_text)
 
214
 
215
  def get_keys(self) -> List[str]:
216
  """Get list of secret keys"""
@@ -219,7 +240,7 @@ class SecretsManager:
219
 
220
  def get_secrets_for_prompt(self) -> str:
221
  """Get formatted string of secret keys for system prompt"""
222
- content = self._last_raw_text or self.read_secrets_raw()
223
  if not content:
224
  return ""
225
 
@@ -251,9 +272,7 @@ class SecretsManager:
251
  return secrets[key]
252
  else:
253
  available_keys = ", ".join(secrets.keys())
254
- error_msg = (
255
- f"Secret placeholder '{alias_for_key(key)}' not found in secrets store.\n"
256
- )
257
  error_msg += f"Available secrets: {available_keys}"
258
 
259
  raise RepairableException(error_msg)
@@ -276,7 +295,9 @@ class SecretsManager:
276
 
277
  return result
278
 
279
- def mask_values(self, text: str, min_length: int = 4, placeholder: str = "§§secret({key})") -> str:
 
 
280
  """Replace actual secret values with placeholders in text"""
281
  if not text:
282
  return text
@@ -295,18 +316,21 @@ class SecretsManager:
295
 
296
  def get_masked_secrets(self) -> str:
297
  """Get content with values masked for frontend display (preserves comments and unrecognized lines)"""
298
- if not (content:=self.read_secrets_raw()):
 
299
  return ""
300
 
301
  # Parse content for known keys using python-dotenv
302
  secrets_map = self.parse_env_content(content)
303
  env_lines = self.parse_env_lines(content)
 
304
  # Replace values with mask for keys present
305
  for ln in env_lines:
306
  if ln.type == "pair" and ln.key is not None:
307
  ln.key = ln.key.upper()
308
  if ln.key in secrets_map and secrets_map[ln.key] != "":
309
  ln.value = self.MASK_VALUE
 
310
  return self._serialize_env_lines(env_lines)
311
 
312
  def parse_env_content(self, content: str) -> Dict[str, str]:
@@ -325,6 +349,13 @@ class SecretsManager:
325
  """Clear the secrets cache"""
326
  with self._lock:
327
  self._secrets_cache = None
 
 
 
 
 
 
 
328
 
329
  # ---------------- Internal helpers for parsing/merging ----------------
330
 
@@ -342,9 +373,7 @@ class SecretsManager:
342
  # Fallback to composed key_part if original not available
343
  if "=" in line_text:
344
  left, right = line_text.split("=", 1)
345
- key_part = left
346
  else:
347
- key_part = binding.key
348
  right = ""
349
  # Try to extract inline comment by scanning right side to comment start, respecting quotes
350
  in_single = False
@@ -376,7 +405,6 @@ class SecretsManager:
376
  type="pair",
377
  key=binding.key,
378
  value=binding.value or "",
379
- key_part=key_part,
380
  inline_comment=inline_comment,
381
  )
382
  )
@@ -404,11 +432,15 @@ class SecretsManager:
404
  out: List[str] = []
405
  for ln in lines:
406
  if ln.type == "pair" and ln.key is not None:
407
- left_raw = ln.key_part if ln.key_part is not None else ln.key
408
  left = left_raw.upper()
409
  val = ln.value if ln.value is not None else ""
410
  comment = ln.inline_comment or ""
411
- formatted_key = key_formatter(left) if key_formatter else f"{key_delimiter}{left}{key_delimiter}"
 
 
 
 
412
  val_part = f'="{val}"' if with_values else ""
413
  comment_part = f" {comment}" if with_comments and comment else ""
414
  out.append(f"{formatted_key}{val_part}{comment_part}")
@@ -455,11 +487,10 @@ class SecretsManager:
455
  existing_val = existing_pairs[key].value or ""
456
  merged.append(
457
  EnvLine(
458
- raw=f"{(sub.key_part or key)}={existing_val}",
459
  type="pair",
460
  key=key,
461
  value=existing_val,
462
- key_part=sub.key_part or key,
463
  inline_comment=sub.inline_comment,
464
  )
465
  )
@@ -471,3 +502,40 @@ class SecretsManager:
471
  merged.append(sub)
472
 
473
  return merged
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
4
  import os
5
  from io import StringIO
6
  from dataclasses import dataclass
7
+ from typing import Dict, Optional, List, Literal, Set, Callable, Tuple, TYPE_CHECKING
8
  from dotenv.parser import parse_stream
9
  from python.helpers.errors import RepairableException
10
  from python.helpers import files
11
 
12
+ if TYPE_CHECKING:
13
+ from agent import AgentContext
14
+
15
 
16
  # New alias-based placeholder format §§secret(KEY)
17
  ALIAS_PATTERN = r"§§secret\(([A-Za-z_][A-Za-z0-9_]*)\)"
18
+ DEFAULT_SECRETS_FILE = "tmp/secrets.env"
19
+
20
 
21
  def alias_for_key(key: str, placeholder: str = "§§secret({key})") -> str:
22
  # Return alias string for given key in upper-case
23
  key = key.upper()
24
  return placeholder.format(key=key)
25
 
26
+
27
  @dataclass
28
  class EnvLine:
29
  raw: str
30
  type: Literal["pair", "comment", "blank", "other"]
31
  key: Optional[str] = None
32
  value: Optional[str] = None
 
33
  inline_comment: Optional[str] = (
34
  None # preserves trailing inline comment including leading spaces and '#'
35
  )
 
123
 
124
 
125
  class SecretsManager:
 
126
  PLACEHOLDER_PATTERN = ALIAS_PATTERN
127
  MASK_VALUE = "***"
128
 
129
+ _instances: Dict[Tuple[str, ...], "SecretsManager"] = {}
130
  _secrets_cache: Optional[Dict[str, str]] = None
131
  _last_raw_text: Optional[str] = None
132
 
133
  @classmethod
134
+ def get_instance(cls, *secrets_files: str) -> "SecretsManager":
135
+ if not secrets_files:
136
+ secrets_files = (DEFAULT_SECRETS_FILE,)
137
+ key = tuple(secrets_files)
138
+ if key not in cls._instances:
139
+ cls._instances[key] = cls(*secrets_files)
140
+ return cls._instances[key]
141
+
142
+ def __init__(self, *files: str):
143
  self._lock = threading.RLock()
144
+ # instance-level list of secrets files
145
+ self._files: Tuple[str, ...] = tuple(files) if files else (DEFAULT_SECRETS_FILE,)
146
+ self._raw_snapshots: Dict[str, str] = {}
147
+ self._secrets_cache = None
148
+ self._last_raw_text = None
 
 
 
149
 
150
  def read_secrets_raw(self) -> str:
151
  """Read raw secrets file content from local filesystem (same system)."""
152
+ parts: List[str] = []
153
+ self._raw_snapshots = {}
154
+
155
+ for path in self._files:
156
+ try:
157
+ content = files.read_file(path)
158
+ except Exception:
159
+ content = ""
160
+
161
+ self._raw_snapshots[path] = content
162
+ parts.append(content)
163
+
164
+ combined = "\n".join(parts)
165
+ self._last_raw_text = combined
166
+ return combined
167
 
168
  def _write_secrets_raw(self, content: str):
169
  """Write raw secrets file content to local filesystem."""
170
+ if len(self._files) != 1:
171
+ raise RuntimeError(
172
+ "Saving secrets content is only supported for a single secrets file"
173
+ )
174
+ files.write_file(self._files[0], content)
175
 
176
  def load_secrets(self) -> Dict[str, str]:
177
  """Load secrets from file, return key-value dict"""
 
179
  if self._secrets_cache is not None:
180
  return self._secrets_cache
181
 
182
+ combined_raw = self.read_secrets_raw()
183
+ merged_secrets = (
184
+ self.parse_env_content(combined_raw) if combined_raw else {}
185
+ )
186
+
187
+ # Only track the first file's raw text for single-file setups
188
+ if len(self._files) != 1:
189
+ self._last_raw_text = None
190
+
191
+ self._secrets_cache = merged_secrets
192
+ return merged_secrets
 
 
193
 
194
  def save_secrets(self, secrets_content: str):
195
  """Save secrets content to file and update cache"""
196
+ if len(self._files) != 1:
197
+ raise RuntimeError(
198
+ "Saving secrets is disabled when multiple files are configured"
199
+ )
200
  with self._lock:
 
201
  self._write_secrets_raw(secrets_content)
202
+ self._invalidate_all_caches()
 
 
 
203
 
204
  def save_secrets_with_merge(self, submitted_content: str):
205
  """Merge submitted content with existing file preserving comments, order and supporting deletion.
 
207
  - Keys present in existing but omitted from submitted are deleted.
208
  - New keys with non-masked values are appended at the end.
209
  """
210
+ if len(self._files) != 1:
211
+ raise RuntimeError(
212
+ "Merging secrets is disabled when multiple files are configured"
213
+ )
214
  with self._lock:
215
  # Prefer in-memory snapshot to avoid disk reads during save
216
+ primary_path = self._files[0]
217
  if self._last_raw_text is not None:
218
  existing_text = self._last_raw_text
219
  else:
220
  try:
221
+ existing_text = files.read_file(primary_path)
222
+ self._raw_snapshots[primary_path] = existing_text
223
  except Exception as e:
224
  # If read fails and submitted contains masked values, abort to avoid losing values/comments
225
  if self.MASK_VALUE in submitted_content:
 
230
  existing_text = ""
231
  merged_lines = self._merge_env(existing_text, submitted_content)
232
  merged_text = self._serialize_env_lines(merged_lines)
233
+ self._write_secrets_raw(merged_text)
234
+ self._invalidate_all_caches()
235
 
236
  def get_keys(self) -> List[str]:
237
  """Get list of secret keys"""
 
240
 
241
  def get_secrets_for_prompt(self) -> str:
242
  """Get formatted string of secret keys for system prompt"""
243
+ content = self.read_secrets_raw()
244
  if not content:
245
  return ""
246
 
 
272
  return secrets[key]
273
  else:
274
  available_keys = ", ".join(secrets.keys())
275
+ error_msg = f"Secret placeholder '{alias_for_key(key)}' not found in secrets store.\n"
 
 
276
  error_msg += f"Available secrets: {available_keys}"
277
 
278
  raise RepairableException(error_msg)
 
295
 
296
  return result
297
 
298
+ def mask_values(
299
+ self, text: str, min_length: int = 4, placeholder: str = "§§secret({key})"
300
+ ) -> str:
301
  """Replace actual secret values with placeholders in text"""
302
  if not text:
303
  return text
 
316
 
317
  def get_masked_secrets(self) -> str:
318
  """Get content with values masked for frontend display (preserves comments and unrecognized lines)"""
319
+ content = self.read_secrets_raw()
320
+ if not content:
321
  return ""
322
 
323
  # Parse content for known keys using python-dotenv
324
  secrets_map = self.parse_env_content(content)
325
  env_lines = self.parse_env_lines(content)
326
+
327
  # Replace values with mask for keys present
328
  for ln in env_lines:
329
  if ln.type == "pair" and ln.key is not None:
330
  ln.key = ln.key.upper()
331
  if ln.key in secrets_map and secrets_map[ln.key] != "":
332
  ln.value = self.MASK_VALUE
333
+
334
  return self._serialize_env_lines(env_lines)
335
 
336
  def parse_env_content(self, content: str) -> Dict[str, str]:
 
349
  """Clear the secrets cache"""
350
  with self._lock:
351
  self._secrets_cache = None
352
+ self._raw_snapshots = {}
353
+ self._last_raw_text = None
354
+
355
+ @classmethod
356
+ def _invalidate_all_caches(cls):
357
+ for instance in cls._instances.values():
358
+ instance.clear_cache()
359
 
360
  # ---------------- Internal helpers for parsing/merging ----------------
361
 
 
373
  # Fallback to composed key_part if original not available
374
  if "=" in line_text:
375
  left, right = line_text.split("=", 1)
 
376
  else:
 
377
  right = ""
378
  # Try to extract inline comment by scanning right side to comment start, respecting quotes
379
  in_single = False
 
405
  type="pair",
406
  key=binding.key,
407
  value=binding.value or "",
 
408
  inline_comment=inline_comment,
409
  )
410
  )
 
432
  out: List[str] = []
433
  for ln in lines:
434
  if ln.type == "pair" and ln.key is not None:
435
+ left_raw = ln.key
436
  left = left_raw.upper()
437
  val = ln.value if ln.value is not None else ""
438
  comment = ln.inline_comment or ""
439
+ formatted_key = (
440
+ key_formatter(left)
441
+ if key_formatter
442
+ else f"{key_delimiter}{left}{key_delimiter}"
443
+ )
444
  val_part = f'="{val}"' if with_values else ""
445
  comment_part = f" {comment}" if with_comments and comment else ""
446
  out.append(f"{formatted_key}{val_part}{comment_part}")
 
487
  existing_val = existing_pairs[key].value or ""
488
  merged.append(
489
  EnvLine(
490
+ raw=f"{key}={existing_val}",
491
  type="pair",
492
  key=key,
493
  value=existing_val,
 
494
  inline_comment=sub.inline_comment,
495
  )
496
  )
 
502
  merged.append(sub)
503
 
504
  return merged
505
+
506
+
507
+ def get_secrets_manager(context: "AgentContext|None" = None) -> SecretsManager:
508
+ from python.helpers import projects
509
+
510
+ # default secrets file
511
+ secret_files = [DEFAULT_SECRETS_FILE]
512
+
513
+ # use AgentContext from contextvars if no context provided
514
+ if not context:
515
+ from agent import AgentContext
516
+ context = AgentContext.current()
517
+
518
+ # merged with project secrets if active
519
+ if context:
520
+ project = projects.get_context_project_name(context)
521
+ if project:
522
+ secret_files.append(files.get_abs_path(projects.get_project_meta_folder(project), "secrets.env"))
523
+
524
+ return SecretsManager.get_instance(*secret_files)
525
+
526
+ def get_project_secrets_manager(project_name: str, merge_with_global: bool = False) -> SecretsManager:
527
+ from python.helpers import projects
528
+
529
+ # default secrets file
530
+ secret_files = []
531
+
532
+ if merge_with_global:
533
+ secret_files.append(DEFAULT_SECRETS_FILE)
534
+
535
+ # merged with project secrets if active
536
+ secret_files.append(files.get_abs_path(projects.get_project_meta_folder(project_name), "secrets.env"))
537
+
538
+ return SecretsManager.get_instance(*secret_files)
539
+
540
+ def get_default_secrets_manager() -> SecretsManager:
541
+ return SecretsManager.get_instance()
python/helpers/settings.py CHANGED
@@ -11,7 +11,7 @@ from python.helpers import runtime, whisper, defer, git
11
  from . import files, dotenv
12
  from python.helpers.print_style import PrintStyle
13
  from python.helpers.providers import get_providers
14
- from python.helpers.secrets import SecretsManager
15
  from python.helpers import dirty_json
16
 
17
 
@@ -1110,7 +1110,7 @@ def convert_out(settings: Settings) -> SettingsOutput:
1110
  # Secrets section
1111
  secrets_fields: list[SettingsField] = []
1112
 
1113
- secrets_manager = SecretsManager.get_instance()
1114
  try:
1115
  secrets = secrets_manager.get_masked_secrets()
1116
  except Exception:
@@ -1417,10 +1417,9 @@ def _write_sensitive_settings(settings: Settings):
1417
  set_root_password(settings["root_password"])
1418
 
1419
  # Handle secrets separately - merge with existing preserving comments/order and support deletions
1420
- secrets_manager = SecretsManager.get_instance()
1421
  submitted_content = settings["secrets"]
1422
  secrets_manager.save_secrets_with_merge(submitted_content)
1423
- secrets_manager.clear_cache() # Clear cache to reload secrets
1424
 
1425
 
1426
 
 
11
  from . import files, dotenv
12
  from python.helpers.print_style import PrintStyle
13
  from python.helpers.providers import get_providers
14
+ from python.helpers.secrets import get_default_secrets_manager
15
  from python.helpers import dirty_json
16
 
17
 
 
1110
  # Secrets section
1111
  secrets_fields: list[SettingsField] = []
1112
 
1113
+ secrets_manager = get_default_secrets_manager()
1114
  try:
1115
  secrets = secrets_manager.get_masked_secrets()
1116
  except Exception:
 
1417
  set_root_password(settings["root_password"])
1418
 
1419
  # Handle secrets separately - merge with existing preserving comments/order and support deletions
1420
+ secrets_manager = get_default_secrets_manager()
1421
  submitted_content = settings["secrets"]
1422
  secrets_manager.save_secrets_with_merge(submitted_content)
 
1423
 
1424
 
1425
 
python/helpers/vector_db.py CHANGED
@@ -14,6 +14,7 @@ from langchain_community.vectorstores.utils import (
14
  DistanceStrategy,
15
  )
16
  from langchain.embeddings import CacheBackedEmbeddings
 
17
 
18
  from agent import Agent
19
 
@@ -140,7 +141,7 @@ def cosine_normalizer(val: float) -> float:
140
  def get_comparator(condition: str):
141
  def comparator(data: dict[str, Any]):
142
  try:
143
- result = eval(condition, {}, data)
144
  return result
145
  except Exception as e:
146
  # PrintStyle.error(f"Error evaluating condition: {e}")
 
14
  DistanceStrategy,
15
  )
16
  from langchain.embeddings import CacheBackedEmbeddings
17
+ from simpleeval import simple_eval
18
 
19
  from agent import Agent
20
 
 
141
  def get_comparator(condition: str):
142
  def comparator(data: dict[str, Any]):
143
  try:
144
+ result = simple_eval(condition, {}, data)
145
  return result
146
  except Exception as e:
147
  # PrintStyle.error(f"Error evaluating condition: {e}")
python/tools/behaviour_adjustment.py CHANGED
@@ -52,7 +52,7 @@ async def update_behaviour(agent: Agent, log_item: LogItem, adjustments: str):
52
 
53
 
54
  def get_custom_rules_file(agent: Agent):
55
- return memory.get_memory_subdir_abs(agent) + f"/behaviour.md"
56
 
57
 
58
  def read_rules(agent: Agent):
 
52
 
53
 
54
  def get_custom_rules_file(agent: Agent):
55
+ return files.get_abs_path(memory.get_memory_subdir_abs(agent), "behaviour.md")
56
 
57
 
58
  def read_rules(agent: Agent):
python/tools/browser_agent.py CHANGED
@@ -9,7 +9,7 @@ from python.helpers import files, defer, persist_chat, strings
9
  from python.helpers.browser_use import browser_use # type: ignore[attr-defined]
10
  from python.helpers.print_style import PrintStyle
11
  from python.helpers.playwright import ensure_playwright_binary
12
- from python.helpers.secrets import SecretsManager
13
  from python.extensions.message_loop_start._10_iteration_no import get_iter_no
14
  from pydantic import BaseModel
15
  import uuid
@@ -153,7 +153,7 @@ class State:
153
 
154
  try:
155
 
156
- secrets_manager = SecretsManager.get_instance()
157
  secrets_dict = secrets_manager.load_secrets()
158
 
159
  self.use_agent = browser_use.Agent(
@@ -216,7 +216,7 @@ class BrowserAgent(Tool):
216
  self.guid = self.agent.context.generate_id() # short random id
217
  reset = str(reset).lower().strip() == "true"
218
  await self.prepare_state(reset=reset)
219
- message = SecretsManager.get_instance().mask_values(message, placeholder="<secret>{key}</secret>") # mask any potential passwords passed from A0 to browser-use to browser-use format
220
  task = self.state.start_task(message) if self.state else None
221
 
222
  # wait for browser agent to finish and update progress with timeout
@@ -394,7 +394,7 @@ class BrowserAgent(Tool):
394
 
395
  def _mask(self, text: str) -> str:
396
  try:
397
- return SecretsManager.get_instance().mask_values(text or "")
398
  except Exception as e:
399
  return text or ""
400
 
 
9
  from python.helpers.browser_use import browser_use # type: ignore[attr-defined]
10
  from python.helpers.print_style import PrintStyle
11
  from python.helpers.playwright import ensure_playwright_binary
12
+ from python.helpers.secrets import get_secrets_manager
13
  from python.extensions.message_loop_start._10_iteration_no import get_iter_no
14
  from pydantic import BaseModel
15
  import uuid
 
153
 
154
  try:
155
 
156
+ secrets_manager = get_secrets_manager(self.agent.context)
157
  secrets_dict = secrets_manager.load_secrets()
158
 
159
  self.use_agent = browser_use.Agent(
 
216
  self.guid = self.agent.context.generate_id() # short random id
217
  reset = str(reset).lower().strip() == "true"
218
  await self.prepare_state(reset=reset)
219
+ message = get_secrets_manager(self.agent.context).mask_values(message, placeholder="<secret>{key}</secret>") # mask any potential passwords passed from A0 to browser-use to browser-use format
220
  task = self.state.start_task(message) if self.state else None
221
 
222
  # wait for browser agent to finish and update progress with timeout
 
394
 
395
  def _mask(self, text: str) -> str:
396
  try:
397
+ return get_secrets_manager(self.agent.context).mask_values(text or "")
398
  except Exception as e:
399
  return text or ""
400
 
tests/rate_limiter_test.py CHANGED
@@ -3,8 +3,8 @@ import sys, os
3
  sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
4
  import models
5
 
6
- provider = "openai"
7
- name = "gpt-4.1-mini"
8
 
9
  model = models.get_chat_model(
10
  provider=provider,
 
3
  sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
4
  import models
5
 
6
+ provider = "openrouter"
7
+ name = "deepseek/deepseek-r1"
8
 
9
  model = models.get_chat_model(
10
  provider=provider,
webui/components/chat/top-section/chat-top-store.js ADDED
@@ -0,0 +1,12 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import { createStore } from "/js/AlpineStore.js";
2
+
3
+ // define the model object holding data and functions
4
+ const model = {
5
+ connected: false,
6
+ };
7
+
8
+ // convert it to alpine store
9
+ const store = createStore("chatTop", model);
10
+
11
+ // export for use in other files
12
+ export { store };
webui/components/chat/top-section/chat-top.html ADDED
@@ -0,0 +1,43 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ <html>
2
+
3
+ <head>
4
+ <title>Example component or modal</title>
5
+
6
+ <!-- Import the alpine store -->
7
+ <script type="module">
8
+ import { store } from "/components/chat/top-section/chat-top-store.js";
9
+ </script>
10
+ </head>
11
+
12
+ <body>
13
+
14
+ <!-- This construct of x-data + x-if is used to ensure the component is only rendered when the store is available -->
15
+ <div x-data>
16
+ <template x-if="$store.chatTop">
17
+
18
+ <!-- Time and Date -->
19
+ <div id="time-date-container">
20
+ <div id="time-date"></div>
21
+ <div class="status-icon">
22
+ <svg viewBox="0 0 30 30">
23
+ <!-- Connected State (filled circle) -->
24
+ <circle class="connected-circle" cx="15" cy="15" r="8"
25
+ x-bind:fill="$store.chatTop.connected ? '#00c340' : 'none'" x-bind:opacity="$store.chatTop.connected ? 1 : 0" />
26
+
27
+ <!-- Disconnected State (outline circle) -->
28
+ <circle class="disconnected-circle" cx="15" cy="15" r="12" fill="none" stroke="#e40138"
29
+ stroke-width="3" x-bind:opacity="$store.chatTop.connected ? 0 : 1" />
30
+ </svg>
31
+ </div>
32
+ <!-- Notification Toggle positioned next to time-date -->
33
+ <x-component path="notifications/notification-icons.html"></x-component>
34
+ <!-- Project Selector -->
35
+ <x-component path="projects/project-selector.html"></x-component>
36
+ </div>
37
+
38
+ </template>
39
+ </div>
40
+
41
+ </body>
42
+
43
+ </html>
webui/components/modals/file-browser/file-browser-store.js CHANGED
@@ -24,31 +24,36 @@ const model = {
24
  },
25
 
26
  // --- Public API (called from button/link) --------------------------------
27
- async open() {
28
  if (this.isLoading) return; // Prevent double-open
29
-
30
  this.isLoading = true;
31
  this.error = null;
32
  this.history = [];
33
-
34
  try {
35
  // Open modal FIRST (immediate UI feedback)
36
- this.closePromise = window.openModal('modals/file-browser/file-browser.html');
37
-
38
- // Setup cleanup on modal close
39
- if (this.closePromise && typeof this.closePromise.then === 'function') {
40
- this.closePromise.then(() => {
41
- this.destroy();
42
- });
43
- }
44
-
 
 
45
  // Use stored initial path or default
46
- const path = this.initialPath || this.browser.currentPath || "$WORK_DIR";
47
  this.browser.currentPath = path;
48
-
49
  // Fetch files
50
  await this.fetchFiles(this.browser.currentPath);
51
-
 
 
 
 
52
  } catch (error) {
53
  console.error("File browser error:", error);
54
  this.error = error?.message || "Failed to load files";
@@ -128,7 +133,9 @@ const model = {
128
  async fetchFiles(path = "") {
129
  this.isLoading = true;
130
  try {
131
- const response = await fetchApi(`/get_work_dir_files?path=${encodeURIComponent(path)}`);
 
 
132
  if (response.ok) {
133
  const data = await response.json();
134
  this.browser.entries = data.data.entries;
@@ -139,7 +146,10 @@ const model = {
139
  this.browser.entries = [];
140
  }
141
  } catch (e) {
142
- window.toastFrontendError("Error fetching files: " + e.message, "File Browser Error");
 
 
 
143
  this.browser.entries = [];
144
  } finally {
145
  this.isLoading = false;
@@ -147,7 +157,9 @@ const model = {
147
  },
148
 
149
  async navigateToFolder(path) {
150
- if (this.browser.currentPath !== path) this.history.push(this.browser.currentPath);
 
 
151
  await this.fetchFiles(path);
152
  },
153
 
@@ -165,20 +177,32 @@ const model = {
165
  const resp = await fetchApi("/delete_work_dir_file", {
166
  method: "POST",
167
  headers: { "Content-Type": "application/json" },
168
- body: JSON.stringify({ path: file.path, currentPath: this.browser.currentPath }),
 
 
 
169
  });
170
  if (resp.ok) {
171
- this.browser.entries = this.browser.entries.filter((e) => e.path !== file.path);
 
 
172
  alert("File deleted successfully.");
173
  } else {
174
  alert(`Error deleting file: ${await resp.text()}`);
175
  }
176
  } catch (e) {
177
- window.toastFrontendError("Error deleting file: " + e.message, "File Delete Error");
 
 
 
178
  }
179
  },
180
 
181
  async handleFileUpload(event) {
 
 
 
 
182
  try {
183
  const files = event.target.files;
184
  if (!files.length) return;
@@ -186,27 +210,38 @@ const model = {
186
  formData.append("path", this.browser.currentPath);
187
  for (let f of files) {
188
  const ext = f.name.split(".").pop().toLowerCase();
189
- if (!["zip", "tar", "gz", "rar", "7z"].includes(ext) && f.size > 100 * 1024 * 1024) {
 
 
 
190
  alert(`File ${f.name} exceeds 100MB limit.`);
191
  continue;
192
  }
193
  formData.append("files[]", f);
194
  }
195
- const resp = await fetchApi("/upload_work_dir_files", { method: "POST", body: formData });
 
 
 
196
  if (resp.ok) {
197
  const data = await resp.json();
198
  this.browser.entries = data.data.entries;
199
  this.browser.currentPath = data.data.current_path;
200
  this.browser.parentPath = data.data.parent_path;
201
  if (data.failed && data.failed.length) {
202
- const msg = data.failed.map((f) => `${f.name}: ${f.error}`).join("\n");
 
 
203
  alert(`Some files failed to upload:\n${msg}`);
204
  }
205
  } else {
206
  alert(await resp.text());
207
  }
208
  } catch (e) {
209
- window.toastFrontendError("Error uploading files: " + e.message, "File Upload Error");
 
 
 
210
  } finally {
211
  event.target.value = ""; // reset input so same file can be reselected
212
  }
@@ -233,12 +268,14 @@ window.openFileLink = async function (path) {
233
  }
234
  if (resp.is_dir) {
235
  // Set initial path and open via store
236
- store.initialPath = resp.abs_path;
237
- await store.open();
238
  } else {
239
  store.downloadFile({ path: resp.abs_path, name: resp.file_name });
240
  }
241
  } catch (e) {
242
- window.toastFrontendError("Error opening file: " + e.message, "File Open Error");
 
 
 
243
  }
244
  };
 
24
  },
25
 
26
  // --- Public API (called from button/link) --------------------------------
27
+ async open(path = "") {
28
  if (this.isLoading) return; // Prevent double-open
 
29
  this.isLoading = true;
30
  this.error = null;
31
  this.history = [];
32
+
33
  try {
34
  // Open modal FIRST (immediate UI feedback)
35
+ this.closePromise = window.openModal(
36
+ "modals/file-browser/file-browser.html"
37
+ );
38
+
39
+ // // Setup cleanup on modal close
40
+ // if (this.closePromise && typeof this.closePromise.then === "function") {
41
+ // this.closePromise.then(() => {
42
+ // this.destroy();
43
+ // });
44
+ // }
45
+
46
  // Use stored initial path or default
47
+ path = path || this.initialPath || this.browser.currentPath || "$WORK_DIR";
48
  this.browser.currentPath = path;
49
+
50
  // Fetch files
51
  await this.fetchFiles(this.browser.currentPath);
52
+
53
+ // await modal close
54
+ await this.closePromise;
55
+ this.destroy();
56
+
57
  } catch (error) {
58
  console.error("File browser error:", error);
59
  this.error = error?.message || "Failed to load files";
 
133
  async fetchFiles(path = "") {
134
  this.isLoading = true;
135
  try {
136
+ const response = await fetchApi(
137
+ `/get_work_dir_files?path=${encodeURIComponent(path)}`
138
+ );
139
  if (response.ok) {
140
  const data = await response.json();
141
  this.browser.entries = data.data.entries;
 
146
  this.browser.entries = [];
147
  }
148
  } catch (e) {
149
+ window.toastFrontendError(
150
+ "Error fetching files: " + e.message,
151
+ "File Browser Error"
152
+ );
153
  this.browser.entries = [];
154
  } finally {
155
  this.isLoading = false;
 
157
  },
158
 
159
  async navigateToFolder(path) {
160
+ if(!path.startsWith("/")) path = "/" + path;
161
+ if (this.browser.currentPath !== path)
162
+ this.history.push(this.browser.currentPath);
163
  await this.fetchFiles(path);
164
  },
165
 
 
177
  const resp = await fetchApi("/delete_work_dir_file", {
178
  method: "POST",
179
  headers: { "Content-Type": "application/json" },
180
+ body: JSON.stringify({
181
+ path: file.path,
182
+ currentPath: this.browser.currentPath,
183
+ }),
184
  });
185
  if (resp.ok) {
186
+ this.browser.entries = this.browser.entries.filter(
187
+ (e) => e.path !== file.path
188
+ );
189
  alert("File deleted successfully.");
190
  } else {
191
  alert(`Error deleting file: ${await resp.text()}`);
192
  }
193
  } catch (e) {
194
+ window.toastFrontendError(
195
+ "Error deleting file: " + e.message,
196
+ "File Delete Error"
197
+ );
198
  }
199
  },
200
 
201
  async handleFileUpload(event) {
202
+ return store._handleFileUpload(event); // bind to model to ensure correct context
203
+ },
204
+
205
+ async _handleFileUpload(event) {
206
  try {
207
  const files = event.target.files;
208
  if (!files.length) return;
 
210
  formData.append("path", this.browser.currentPath);
211
  for (let f of files) {
212
  const ext = f.name.split(".").pop().toLowerCase();
213
+ if (
214
+ !["zip", "tar", "gz", "rar", "7z"].includes(ext) &&
215
+ f.size > 100 * 1024 * 1024
216
+ ) {
217
  alert(`File ${f.name} exceeds 100MB limit.`);
218
  continue;
219
  }
220
  formData.append("files[]", f);
221
  }
222
+ const resp = await fetchApi("/upload_work_dir_files", {
223
+ method: "POST",
224
+ body: formData,
225
+ });
226
  if (resp.ok) {
227
  const data = await resp.json();
228
  this.browser.entries = data.data.entries;
229
  this.browser.currentPath = data.data.current_path;
230
  this.browser.parentPath = data.data.parent_path;
231
  if (data.failed && data.failed.length) {
232
+ const msg = data.failed
233
+ .map((f) => `${f.name}: ${f.error}`)
234
+ .join("\n");
235
  alert(`Some files failed to upload:\n${msg}`);
236
  }
237
  } else {
238
  alert(await resp.text());
239
  }
240
  } catch (e) {
241
+ window.toastFrontendError(
242
+ "Error uploading files: " + e.message,
243
+ "File Upload Error"
244
+ );
245
  } finally {
246
  event.target.value = ""; // reset input so same file can be reselected
247
  }
 
268
  }
269
  if (resp.is_dir) {
270
  // Set initial path and open via store
271
+ await store.open(resp.abs_path);
 
272
  } else {
273
  store.downloadFile({ path: resp.abs_path, name: resp.file_name });
274
  }
275
  } catch (e) {
276
+ window.toastFrontendError(
277
+ "Error opening file: " + e.message,
278
+ "File Open Error"
279
+ );
280
  }
281
  };
webui/components/modals/file-browser/file-browser.html CHANGED
@@ -211,6 +211,7 @@
211
 
212
  /* Path Navigator Styles */
213
  .path-navigator {
 
214
  display: flex;
215
  align-items: center;
216
  gap: 24px;
 
211
 
212
  /* Path Navigator Styles */
213
  .path-navigator {
214
+ overflow: hidden;
215
  display: flex;
216
  align-items: center;
217
  gap: 24px;
webui/components/notifications/notification-store.js CHANGED
@@ -664,7 +664,8 @@ const model = {
664
  title = "Connection Error",
665
  display_time = 8,
666
  group = "",
667
- priority = defaultPriority
 
668
  ) {
669
  return await this.addFrontendToast(
670
  NotificationType.ERROR,
 
664
  title = "Connection Error",
665
  display_time = 8,
666
  group = "",
667
+ priority = defaultPriority,
668
+ frontendOnly = false
669
  ) {
670
  return await this.addFrontendToast(
671
  NotificationType.ERROR,