Spaces:
Sleeping
Sleeping
chatbot con memoria
Browse files
app.py
ADDED
|
@@ -0,0 +1,70 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Simple chatbot built with LangChain, using Amazon Bedrock.
|
| 2 |
+
import gradio as gr
|
| 3 |
+
import random
|
| 4 |
+
import langchain
|
| 5 |
+
import langchain_community
|
| 6 |
+
from langchain_aws import ChatBedrock
|
| 7 |
+
from langchain.chains import ConversationChain
|
| 8 |
+
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
|
| 9 |
+
#para manejar la memoria del chat
|
| 10 |
+
import uuid
|
| 11 |
+
from langchain_community.chat_message_histories import ChatMessageHistory
|
| 12 |
+
from langchain_core.chat_history import BaseChatMessageHistory
|
| 13 |
+
from langchain_core.runnables.history import RunnableWithMessageHistory
|
| 14 |
+
|
| 15 |
+
#para langchain 0.2
|
| 16 |
+
store = {}
|
| 17 |
+
|
| 18 |
+
# Helper that mints a unique chat session identifier.
def get_chat_session_id():
    """Return a fresh random session id as a UUID4 string."""
    return f"{uuid.uuid4()}"
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
def get_session_history(session_id: str) -> BaseChatMessageHistory:
    """Return the message history for *session_id*, creating an empty one on first use.

    Histories live in the module-level ``store`` dict, keyed by session id.
    """
    try:
        return store[session_id]
    except KeyError:
        history = ChatMessageHistory()
        store[session_id] = history
        return history
|
| 27 |
+
|
| 28 |
+
def init():
    """Build the conversational runnable used by the chat UI.

    Wires Bedrock's Claude 3 Sonnet behind a prompt template whose system
    role is parameterized by ``{ability}``, and wraps the chain so that each
    session id gets its own message history (LangChain 0.2 style memory).
    """
    llm = ChatBedrock(
        model_id="anthropic.claude-3-sonnet-20240229-v1:0",
        model_kwargs={"temperature": 0.1},
    )
    # Prior turns are injected through the "history" placeholder; the current
    # user message fills {input}.
    chat_prompt = ChatPromptTemplate.from_messages(
        [
            (
                "system",
                "You're an assistant who's good at {ability}. Respond in 20 words or fewer",
            ),
            MessagesPlaceholder(variable_name="history"),
            ("human", "{input}"),
        ]
    )
    chain = chat_prompt | llm
    # Per-session memory: get_session_history resolves the store entry for the
    # session_id passed in the invoke config.
    return RunnableWithMessageHistory(
        chain,
        get_session_history,
        input_messages_key="input",
        history_messages_key="history",
    )
|
| 54 |
+
|
| 55 |
+
def bedrock_response(message, history):
    """Gradio chat callback: send *message* to the Bedrock chain, return the reply text.

    The *history* argument is supplied by ``gr.ChatInterface`` but is not used
    here: conversation memory is kept server-side in the session store keyed
    by the module-level ``id_session``.
    """
    the_ability = "Filosofia"  # topic the assistant specializes in
    response = the_chat.invoke(
        {"ability": the_ability, "input": message},
        config={"configurable": {"session_id": id_session}},
    )
    # Fix: dropped the leftover debug print(type(response)) / print(response)
    # calls, which spammed server logs on every chat turn.
    return response.content
|
| 64 |
+
|
| 65 |
+
# Module-level wiring: one session id and one chain are created at import
# time, so every visitor to this process shares a single conversation memory.
# NOTE(review): true per-user memory would need a fresh session id per Gradio
# session — confirm whether shared memory is intentional here.
id_session=get_chat_session_id()
the_chat = init()
# Chat UI; gr.ChatInterface passes (message, history) to bedrock_response.
demo = gr.ChatInterface(bedrock_response)

if __name__ == "__main__":
    demo.launch()
|