Spaces:
Runtime error
Runtime error
Upload 2 files
Browse files- app.py +22 -11
- requirements.txt +5 -0
app.py
CHANGED
|
@@ -1,20 +1,31 @@
|
|
|
|
|
| 1 |
import gradio as gr
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 2 |
|
| 3 |
-
|
| 4 |
-
|
|
|
|
| 5 |
|
| 6 |
-
|
| 7 |
-
|
| 8 |
|
| 9 |
def responder(pregunta):
    """Return a fixed fallback answer directing the student to the course materials.

    This early version has no retrieval backend: every question gets the
    same canned reply.  (NOTE(review): the diff extraction appears to have
    dropped the first fragment of the original string literal — confirm
    against the repository before relying on the exact wording.)
    """
    return (
        "pero puedo ayudarte a navegar el contenido del curso. Por favor consulta el sílabo o calendario."
    )
|
| 15 |
|
| 16 |
# Gradio UI: single text-in / text-out interface around `responder`.
# BUG FIX: the original `description=` string literal was unterminated and the
# gr.Interface(...) call was never closed, which is a SyntaxError and the
# likely cause of the Space's "Runtime error".
iface = gr.Interface(
    fn=responder,
    inputs="text",
    outputs="text",
    title="Asistente de Bioestadística",
    description="Haz una pregunta sobre el curso.",
)

iface.launch()
|
|
|
|
| 1 |
+
|
| 2 |
import gradio as gr
from llama_index.core import Settings, SimpleDirectoryReader, VectorStoreIndex
from llama_index.core.node_parser import SentenceSplitter
# BUG FIX: HuggingFaceEmbedding lives in the `llama-index-embeddings-huggingface`
# integration package, not in llama_index.core.embeddings; the previously
# imported `HuggingFaceEmbeddingModel` does not exist at all.
from llama_index.embeddings.huggingface import HuggingFaceEmbedding
# BUG FIX: HuggingFaceLLM lives in llama_index.llms.huggingface
# (`llama-index-llms-huggingface` package), not llama_index.llms.
from llama_index.llms.huggingface import HuggingFaceLLM

# Paso 1: Model configuration.
# Local sentence-transformers model for embeddings; small Falcon model as LLM.
embed_model = HuggingFaceEmbedding(model_name="sentence-transformers/all-MiniLM-L6-v2")
# BUG FIX: `model=`/`tokenizer=` expect already-loaded HF objects; to load by
# Hub id, the string kwargs are `model_name=`/`tokenizer_name=`.
llm = HuggingFaceLLM(
    model_name="tiiuae/falcon-rw-1b",
    tokenizer_name="tiiuae/falcon-rw-1b",
    context_window=2048,
)
Settings.embed_model = embed_model
Settings.llm = llm
Settings.node_parser = SentenceSplitter(chunk_size=512, chunk_overlap=50)

# Paso 2: Load the course PDFs from the repo root and build the vector index.
documents = SimpleDirectoryReader(input_dir=".", recursive=True, required_exts=[".pdf"]).load_data()
index = VectorStoreIndex.from_documents(documents)

# Paso 3: Query engine used by the chatbot callback below.
query_engine = index.as_query_engine()
|
| 23 |
|
| 24 |
def responder(pregunta):
    """Answer `pregunta` via the RAG query engine built over the course PDFs.

    Gradio expects a plain string, so the engine's Response object is
    coerced with str().
    """
    return str(query_engine.query(pregunta))
|
|
|
|
|
|
|
|
|
|
| 27 |
|
| 28 |
iface = gr.Interface(fn=responder, inputs="text", outputs="text",
|
| 29 |
title="Asistente de Bioestadística",
|
| 30 |
+
description="Haz una pregunta sobre el curso. Responderé basándome en el sílabo y calendario oficial.")
|
|
|
|
| 31 |
iface.launch()
|
requirements.txt
CHANGED
|
@@ -1,2 +1,7 @@
|
|
| 1 |
|
| 2 |
gradio
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
|
| 2 |
gradio
|
| 3 |
+
llama-index
llama-index-embeddings-huggingface
llama-index-llms-huggingface
|
| 4 |
+
transformers
|
| 5 |
+
sentence-transformers
|
| 6 |
+
torch
|
| 7 |
+
accelerate
|