# NOTE(review): the two lines below this file's original header were web-scrape
# residue from a Hugging Face Spaces page ("Spaces: / No application file"),
# not code. Preserved here as a comment so the file parses as Python.
"""
Data Scientist: Dr. Eddy Giusepe Chirinos Isidro

Objective: study the use of Memory in LangChain,
to build smarter ChatBots.
"""
# All imports grouped at the top (PEP 8); none of the langchain imports
# depend on the .env file being loaded first, so this reordering is safe.
import os

import openai
from dotenv import find_dotenv, load_dotenv
from langchain.chains import ConversationChain
from langchain.chat_models import ChatOpenAI
from langchain.memory import ConversationBufferMemory

# Read the local .env file so OPENAI_API_KEY is present in the environment.
_ = load_dotenv(find_dotenv())
openai.api_key = os.getenv('OPENAI_API_KEY')
# Buffer memory: keeps the entire chat history verbatim and injects it into
# every prompt the chain sends to the model.
memory = ConversationBufferMemory()

# temperature=0.0 -> deterministic replies; max_tokens caps each reply at 150.
llm = ChatOpenAI(temperature=0.0,
                 max_tokens=150,
                 verbose=False
                 )

conversation = ConversationChain(llm=llm,
                                 verbose=False,
                                 memory=memory
                                 )

# Saving the Prompt (the Model itself cannot be saved this way):
#conversation.llm.save("eddy_LLM.json") # AttributeError: 'ChatOpenAI' object has no attribute 'save'
conversation.prompt.save("eddy_prompt.json")
# Examples of pre-seeding the memory with prior messages:
# memory.chat_memory.add_user_message("Meu nome é Eddy Giusepe.")
# memory.chat_memory.add_user_message("Eu sou Cientista de dados e trabalho na central IT.")
# memory.chat_memory.add_user_message("Eu sou Peruano.")

print("Digite a sua pergunta para começar uma conversa com a AI: ")
while True:
    query = input("Human: ")
    # Exit on empty input BEFORE calling the model. (The original checked
    # `if not query` only at the end of the loop, so pressing Enter still
    # triggered a full API call with an empty query before breaking.)
    if not query:
        break
    result = conversation({"input": query})
    print("AI: " + result['response'])
    # NOTE: ConversationChain already persists each exchange into `memory`
    # via save_context(). The original additionally called add_user_message,
    # add_ai_message AND save_context by hand, storing every exchange four
    # times and bloating each subsequent prompt — those duplicates are gone.
    # To inspect what the chain will see on the next turn:
    # print(memory.load_memory_variables({})['history'])
    print("🤗🤗🤗")