NavyDevilDoc committed on
Commit
5f4804b
·
verified ·
1 Parent(s): 0f92342

Update app.py

Browse files

Increased the number of output tokens

Files changed (1) hide show
  1. app.py +4 -2
app.py CHANGED
@@ -115,9 +115,9 @@ def ask_llm(query, context):
115
  # 2. Prepare the payload matching your FastAPI 'PromptRequest' schema
116
  payload = {
117
  "text": f"USER QUESTION: {query}\n\nDOCUMENT CONTEXT:\n{context[:6000]}",
118
- "persona": "You are a Senior Navy Yeoman and Subject Matter Expert. Answer strictly based on the provided context.",
119
  "model": "granite4:latest", # You can swap this for 'gemma3:latest' or 'llama3.2:latest' anytime!
120
- "max_tokens": 1000
121
  }
122
 
123
  # 3. Headers for Authentication (Crucial for Private Spaces)
@@ -388,7 +388,9 @@ if query:
388
  if st.button("✨ Summarize Top Document"):
389
  with st.spinner("Reading full document..."):
390
  ai_response = ask_llm(query, full_doc_text)
 
391
  st.success(ai_response)
 
392
  st.divider()
393
  # --- LLM INTEGRATION END ---
394
 
 
115
  # 2. Prepare the payload matching your FastAPI 'PromptRequest' schema
116
  payload = {
117
  "text": f"USER QUESTION: {query}\n\nDOCUMENT CONTEXT:\n{context[:6000]}",
118
+ "persona": "You are a Senior Navy Yeoman and Subject Matter Expert. Provide a concise answer strictly based on the provided context.",
119
  "model": "granite4:latest", # You can swap this for 'gemma3:latest' or 'llama3.2:latest' anytime!
120
+ "max_tokens": 5000
121
  }
122
 
123
  # 3. Headers for Authentication (Crucial for Private Spaces)
 
388
  if st.button("✨ Summarize Top Document"):
389
  with st.spinner("Reading full document..."):
390
  ai_response = ask_llm(query, full_doc_text)
391
+ st.markdown("---")
392
  st.success(ai_response)
393
+ st.markdown("---")
394
  st.divider()
395
  # --- LLM INTEGRATION END ---
396