Update app.py
app.py
CHANGED
@@ -80,7 +80,7 @@ def save_memory(purpose: str, content: str) -> List[Dict]:
     # Generate structured data
     prompt = f"{INDEX_PROMPT}\nData to index:\n{content[:5000]}"  # Truncate for API limits
     try:
-        response = generate_response(prompt, model="
+        response = generate_response(prompt, model="meta-llama/llama-4-maverick:free")
         structured_data = json.loads(response)
     except Exception as e:
         print(f"Memory processing error: {e}")
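The only change in this hunk is the default model passed to generate_response, now pinned to OpenRouter's free Llama 4 Maverick route. Note that save_memory feeds the reply straight into json.loads, so a model that wraps its JSON in a Markdown fence will trip the except branch. A minimal sketch of a hypothetical helper that tolerates fenced replies (parse_model_json is not part of app.py):

import json
import re

def parse_model_json(response: str) -> dict:
    """Parse JSON from a model reply, tolerating an optional Markdown fence.

    Hypothetical helper, not in app.py: json.loads(response) above assumes
    the model returns bare JSON, which chat models frequently do not.
    """
    # Prefer the contents of a ```json ... ``` (or bare ```) fence if present
    match = re.search(r"```(?:json)?\s*(.*?)\s*```", response, re.DOTALL)
    payload = match.group(1) if match else response
    return json.loads(payload)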
@@ -139,16 +139,12 @@ def read_file_content(file_path: str) -> str:
         return f.read()
     return ""
 
-def generate_response(prompt: str, model: str = "
+def generate_response(prompt: str, model: str = "meta-llama/llama-4-maverick:free") -> str:
     """Generate response using OpenRouter API."""
     try:
         response = openai.ChatCompletion.create(
             model=model,
             messages=[{"role": "user", "content": prompt}],
-            headers={
-                "HTTP-Referer": "https://your-site-url.com",
-                "X-Title": "Your App Name"
-            }
         )
         return response.choices[0].message.content
     except Exception as e:
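The dropped headers argument carried OpenRouter's optional attribution headers (HTTP-Referer and X-Title); with the placeholder values above they added nothing, so removing them is harmless. What the diff does not show is the module-level setup that routes openai.ChatCompletion.create (an openai-python 0.x API) to OpenRouter in the first place. A sketch of that assumed configuration, with the environment variable name also an assumption:

import os
import openai  # assumes openai-python < 1.0, where openai.ChatCompletion exists

# Assumed setup, not visible in this diff: point the OpenAI SDK at
# OpenRouter's OpenAI-compatible endpoint and authenticate with its key.
openai.api_base = "https://openrouter.ai/api/v1"
openai.api_key = os.environ["OPENROUTER_API_KEY"]  # env var name is an assumption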
@@ -178,7 +174,7 @@ def summarize(
     file: Optional[str] = None,
     url: str = "",
     pdf_url: str = "",
-    model: str = "
+    model: str = "meta-llama/llama-4-maverick:free"
 ) -> Generator[Tuple[str, List[Tuple[str, str]], str, Dict], None, None]:
     """Main summarization function with memory support."""
     history = [(inp, "Processing...")]
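summarize is a generator, so a caller (typically a Gradio event handler in a Space) iterates over its yields to stream partial results into the UI. A hypothetical consumption sketch: the inp parameter is inferred from history = [(inp, "Processing...")], and the unpacking assumes each yield matches the declared return type:

# Hypothetical caller; parameter names other than those visible in the
# diff (inp in particular) are inferred from the function body.
for text, history, status, state in summarize(
    inp="Summarize this article",
    url="https://example.com/article",
):
    print(status)      # progress message from each yielded stage
print(text[:200])      # text from the final yield holds the summary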