Update src/streamlit_app.py
Browse files- src/streamlit_app.py +31 -0
src/streamlit_app.py
CHANGED
|
@@ -150,6 +150,37 @@ def generate_response(pipeline_obj, prompt, system_prompt, max_length=256, tempe
|
|
| 150 |
except Exception as e:
|
| 151 |
return f"โ Error generating response: {str(e)}"
|
| 152 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 153 |
def show_requirements_info():
|
| 154 |
"""Show information about missing requirements"""
|
| 155 |
st.error("๐ซ Required libraries are missing!")
|
|
|
|
| 150 |
except Exception as e:
|
| 151 |
return f"โ Error generating response: {str(e)}"
|
| 152 |
|
| 153 |
+
def create_fallback_demo():
    """Create a simple demo mode when model loading fails.

    Returns:
        tuple: ``(tokenizer, model, pipeline)`` mirroring the shape of the
        real model-loading path — ``model`` is ``None`` because nothing was
        actually loaded; the tokenizer and pipeline are lightweight fakes.
    """
    st.warning("🔧 Model loading failed. Running in demo mode with simulated responses.")

    class DemoTokenizer:
        """Minimal stand-in exposing only the attribute the app reads."""

        def __init__(self):
            # Arbitrary but valid-looking end-of-sequence token id.
            self.eos_token_id = 2

    class DemoPipeline:
        """Fake text-generation pipeline returning canned responses.

        Mimics the Hugging Face pipeline call contract: callable with a
        prompt (extra keyword args accepted and ignored) and returns a list
        of dicts with a ``generated_text`` key.
        """

        def __init__(self):
            self.tokenizer = DemoTokenizer()

        def __call__(self, prompt, **kwargs):
            # Simulate model processing latency.
            time.sleep(1)

            # Simple keyword-based canned responses.
            # NOTE(review): all Arabic literals below were mojibake in the
            # original source (UTF-8 mis-decoded through a legacy codepage);
            # reconstructed here as proper UTF-8 — confirm against the
            # author's intended wording.
            if any(arabic_word in prompt for arabic_word in ['مرحبا', 'السلام', 'أهلا']):
                response = "مرحبا بك! أنا حكيم، مساعدك الذكي. كيف يمكنني مساعدتك اليوم؟"
            elif 'hello' in prompt.lower() or 'hi' in prompt.lower():
                response = "Hello! I'm Hakim, your AI assistant. How can I help you today?"
            elif 'what' in prompt.lower() and 'ai' in prompt.lower():
                response = "Artificial Intelligence (AI) refers to computer systems that can perform tasks that typically require human intelligence, such as learning, reasoning, and problem-solving."
            else:
                response = "I understand your question. This is a demo response since the actual model couldn't be loaded. In a real deployment, I would provide a more detailed and contextual answer based on the Rabe3/Hakim model."

            return [{'generated_text': response}]

    st.info("✅ Demo mode initialized!")
    return DemoTokenizer(), None, DemoPipeline()
|
| 183 |
+
|
| 184 |
def show_requirements_info():
|
| 185 |
"""Show information about missing requirements"""
|
| 186 |
st.error("๐ซ Required libraries are missing!")
|