Spaces:
Sleeping
Sleeping
Updated app to add a context, as it seems to be required.
Browse files
app.py
CHANGED
|
@@ -2,9 +2,17 @@ import streamlit as st
|
|
| 2 |
from transformers import pipeline
|
| 3 |
|
| 4 |
qa_pipeline = pipeline("question-answering")
|
| 5 |
-
|
| 6 |
st.title("Adrega AI Help")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 7 |
user_input = st.text_input('Ask me a question')
|
| 8 |
if st.button("Submit"):
|
| 9 |
-
result = qa_pipeline(question=user_input, context=
|
| 10 |
st.write(f"Adrega AI: {result['answer']}")
|
|
|
|
| 2 |
from transformers import pipeline
|
| 3 |
|
| 4 |
qa_pipeline = pipeline("question-answering")
|
|
|
|
| 5 |
st.title("Adrega AI Help")
|
| 6 |
+
context = """
|
| 7 |
+
Adrega PI Help AI is Adrega's fourth attempt to generate AI-based help
|
| 8 |
+
content for the users. First we tried Gemini, but it was not very cost
|
| 9 |
+
effective and we weren't able to use it for reading help documentation
|
| 10 |
+
and answer based on it. Then we tried GPT-4all, but it was very slow and since
|
| 11 |
+
running locally it ate up a lot of precious resources. Finally we tried
|
| 12 |
+
Hugging Face, and this far, it seems to be working.
|
| 13 |
+
"""
|
| 14 |
+
|
| 15 |
user_input = st.text_input('Ask me a question')
|
| 16 |
if st.button("Submit"):
|
| 17 |
+
result = qa_pipeline(question=user_input, context=context)
|
| 18 |
st.write(f"Adrega AI: {result['answer']}")
|