curiouscurrent committed on
Commit
3f96a46
·
verified ·
1 Parent(s): d219650

Update coordinator/task_parser.py

Browse files
Files changed (1) hide show
  1. coordinator/task_parser.py +18 -9
coordinator/task_parser.py CHANGED
@@ -1,15 +1,24 @@
1
  # coordinator/task_parser.py
 
 
 
 
 
 
 
 
2
 
3
  def parse_brief(brief: str):
4
  """
5
- Minimal parser: splits the brief into simple tasks
6
  """
7
- # Very simple splitting by keywords for demonstration
8
- tasks = []
9
- if "authentication" in brief:
10
- tasks.append("Implement authentication system")
11
- if "task sharing" in brief:
12
- tasks.append("Implement task sharing feature")
13
- if "task management" in brief:
14
- tasks.append("Implement task management module")
 
15
  return tasks
 
# coordinator/task_parser.py
import os

from transformers import AutoModelForCausalLM, AutoTokenizer
import torch

# Model used to extract tasks from a brief. Small decoder model by default
# (demo-sized); override via the TASK_PARSER_MODEL env var to use a larger
# model without touching the code. Default is unchanged for existing callers.
MODEL_NAME = os.environ.get("TASK_PARSER_MODEL", "facebook/opt-125m")

# NOTE: these run at import time and will download/load the model on first
# import of this module — an intentional eager-load so parse_brief() is fast,
# but it makes importing this module a heavy side effect.
tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
model = AutoModelForCausalLM.from_pretrained(MODEL_NAME)
 
def parse_brief(brief: str) -> list[str]:
    """
    Use a decoder LLM to extract a list of technical tasks from a project brief.

    Args:
        brief: Free-text project description.

    Returns:
        list[str]: Non-empty task lines parsed from the model's completion,
        with leading/trailing "- " list markers and whitespace removed.
    """
    prompt = f"Extract a list of technical tasks from this project brief:\n{brief}\n\nTasks:"
    inputs = tokenizer(prompt, return_tensors="pt")
    # Inference only — no_grad avoids building an autograd graph for generate().
    with torch.no_grad():
        outputs = model.generate(**inputs, max_new_tokens=150)
    decoded = tokenizer.decode(outputs[0], skip_special_tokens=True)

    # The decoded text echoes the prompt, which itself contains "Tasks:".
    # Keep only what follows the final occurrence of the marker.
    if "Tasks:" in decoded:
        decoded = decoded.split("Tasks:")[-1]

    tasks = []
    for line in decoded.split("\n"):
        cleaned = line.strip("- ").strip()
        # Filter AFTER stripping markers: a line that is only "- " (or "-")
        # is non-empty before cleanup but empty after, and must be dropped
        # (the original filtered before cleanup and leaked "" entries).
        if cleaned:
            tasks.append(cleaned)
    return tasks