File size: 1,385 Bytes
cbeee94
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
import torch
from typing import Dict, Any

from reasoning.scraper import scrape_social_knowledge

class ReasoningAgent:
    """Answers a query by combining long-term memory, model output scores,
    and (as a last-resort fallback) scraped social knowledge."""

    def __init__(self, engine):
        # NOTE(review): engine is expected to expose .ltm (with
        # retrieve_text / store_embedding) and .sentence_encoder — confirm
        # against the caller that constructs this agent.
        self.engine = engine

    def reason(self, query: str, model_outputs: Dict[str, torch.Tensor]) -> str:
        """Build a textual response for *query*.

        Args:
            query: Free-text question to reason about.
            model_outputs: Mapping of model name -> output tensor. The mean
                of each tensor is reported as that model's relevance score;
                non-tensor values are silently skipped.

        Returns:
            All collected reasoning steps joined with single spaces
            (empty string when nothing was found anywhere).
        """
        reasoning_steps = []

        # 1. Memory retrieval — pull up to 5 stored memories for the query.
        memories = self.engine.ltm.retrieve_text(query, k=5)
        if memories:
            reasoning_steps.extend(memories)

        # 2. Model reasoning — summarize each tensor as a scalar score.
        if model_outputs:
            for name, tensor in model_outputs.items():
                if isinstance(tensor, torch.Tensor):
                    # Cast to float first: torch.mean raises RuntimeError on
                    # integer dtypes, and callers may pass e.g. token-count
                    # or index tensors.
                    score = tensor.float().mean().item()
                    reasoning_steps.append(
                        f"{name} relevance score {score:.3f}"
                    )

        # 3. If reasoning is weak → use scraper as a best-effort fallback.
        if len(reasoning_steps) < 2:
            scraped = scrape_social_knowledge(query)
            for item in scraped[:5]:
                reasoning_steps.append(item["text"])

                # Persist scraped knowledge so future queries can be served
                # from memory instead of hitting the network again.
                embedding = self.engine.sentence_encoder.encode(item["text"])
                self.engine.ltm.store_embedding(
                    embedding,
                    metadata=item
                )

        # 4. Synthesize answer.
        return " ".join(reasoning_steps)