eikarna
committed on
Commit
·
6563cfd
1
Parent(s):
a2aeda2
Feat: debug toggle
Browse files
app.py
CHANGED
|
@@ -53,10 +53,8 @@ def configure_sidebar() -> Dict[str, Any]:
|
|
| 53 |
|
| 54 |
def format_deepseek_prompt(system_message: str, user_input: str) -> str:
|
| 55 |
"""Format the prompt according to DeepSeek's required structure"""
|
| 56 |
-
return f"""
|
| 57 |
-
<|
|
| 58 |
-
<|beginofutterance|>User: {user_input}<|endofutterance|>
|
| 59 |
-
<|beginofutterance|>Assistant:"""
|
| 60 |
|
| 61 |
def query_hf_api(payload: Dict[str, Any], api_url: str) -> Optional[Dict[str, Any]]:
|
| 62 |
"""Handle API requests with improved error handling"""
|
|
@@ -103,7 +101,6 @@ def handle_chat_interaction(settings: Dict[str, Any]):
|
|
| 103 |
"temperature": settings["temperature"],
|
| 104 |
"top_p": settings["top_p"],
|
| 105 |
"return_full_text": settings["debug_chat"],
|
| 106 |
-
"stop_sequences": ["<|endofutterance|>", "<|beginofutterance|>"] # Add stop sequences
|
| 107 |
}
|
| 108 |
}
|
| 109 |
|
|
@@ -114,7 +111,7 @@ def handle_chat_interaction(settings: Dict[str, Any]):
|
|
| 114 |
if 'generated_text' in output[0]:
|
| 115 |
response_text = output[0]['generated_text'].strip()
|
| 116 |
# Remove any remaining special tokens
|
| 117 |
-
response_text = response_text.
|
| 118 |
|
| 119 |
# Display and store response
|
| 120 |
with st.chat_message("assistant"):
|
|
@@ -145,6 +142,7 @@ def main():
|
|
| 145 |
settings = configure_sidebar()
|
| 146 |
|
| 147 |
st.title("🤖 DeepSeek Chatbot")
|
|
|
|
| 148 |
st.caption("Powered by Hugging Face Inference API - Configure in sidebar")
|
| 149 |
|
| 150 |
display_chat_history()
|
|
|
|
| 53 |
|
| 54 |
def format_deepseek_prompt(system_message: str, user_input: str) -> str:
|
| 55 |
"""Format the prompt according to DeepSeek's required structure"""
|
| 56 |
+
return f"""System: {system_message}
|
| 57 |
+
<|User|>{user_input}<|Assistant|>"""
|
|
|
|
|
|
|
| 58 |
|
| 59 |
def query_hf_api(payload: Dict[str, Any], api_url: str) -> Optional[Dict[str, Any]]:
|
| 60 |
"""Handle API requests with improved error handling"""
|
|
|
|
| 101 |
"temperature": settings["temperature"],
|
| 102 |
"top_p": settings["top_p"],
|
| 103 |
"return_full_text": settings["debug_chat"],
|
|
|
|
| 104 |
}
|
| 105 |
}
|
| 106 |
|
|
|
|
| 111 |
if 'generated_text' in output[0]:
|
| 112 |
response_text = output[0]['generated_text'].strip()
|
| 113 |
# Remove any remaining special tokens
|
| 114 |
+
response_text = response_text.split("\n</think>\n")[0].strip()
|
| 115 |
|
| 116 |
# Display and store response
|
| 117 |
with st.chat_message("assistant"):
|
|
|
|
| 142 |
settings = configure_sidebar()
|
| 143 |
|
| 144 |
st.title("🤖 DeepSeek Chatbot")
|
| 145 |
+
st.caption(f"Current Model: {settings['model']}")
|
| 146 |
st.caption("Powered by Hugging Face Inference API - Configure in sidebar")
|
| 147 |
|
| 148 |
display_chat_history()
|