# GSKRAGDemoReplic / azure_openai.py
# Source: Hugging Face Space "GSKRAGDemoReplic" (commit 4425cb6, by davidfearne)
# openai
import streamlit as st
# from langchain.chat_models import AzureChatOpenAI
from langchain_openai import AzureChatOpenAI
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts.chat import SystemMessagePromptTemplate, HumanMessagePromptTemplate, ChatPromptTemplate
# LLM Langchain Definition
# Azure OpenAI connection settings shared by qt() and get_response().
# The API key is pulled from Streamlit's secrets store, so this module
# must run inside a Streamlit app with 'azure_api_key' configured.
OPENAI_API_KEY = st.secrets['azure_api_key']
OPENAI_API_TYPE = "azure"
OPENAI_API_BASE = "https://davidfearn-gpt4.openai.azure.com"
# OPENAI_API_VERSION = "2024-02-01"
OPENAI_API_VERSION = "2024-08-01-preview"
# OPENAI_MODEL = "gpt4-turbo-1106"
# Azure deployment name to call (previous deployment kept above for reference).
OPENAI_MODEL = "gpt-4o"
# Initialize an instance of AzureOpenAI using the specified settings
def read_file(file):
    """
    Read a markdown asset from the assets/ directory.

    :param file: Base name of the markdown file (without the .md extension).
    :return: The file content as a string, or None if the file was missing
             or unreadable (an error message is printed in that case).
    """
    fp = f"assets/{file}.md"
    try:
        # Use a distinct handle name so the 'file' parameter is not shadowed.
        with open(fp, 'r', encoding='utf-8') as fh:
            return fh.read()
    except FileNotFoundError:
        print(f"The file at {fp} was not found.")
    except IOError:
        print(f"An error occurred while reading the file at {fp}.")
    # Best-effort behavior: callers receive None on any read failure.
    return None
def qt(systemMessgae, history, temp, tokens, file):
    """
    Run a one-shot LLM call that condenses the conversation so far into an
    optimised query.

    :param systemMessgae: System-prompt template string; may reference the
        {assetGlossary} placeholder (filled from the asset file) —
        NOTE(review): whether it does depends on the caller's template.
    :param history: Conversation history, injected as {conversationToDate}.
    :param temp: Sampling temperature for the model.
    :param tokens: max_tokens limit for the completion.
    :param file: Base name of the markdown asset (under assets/) whose
        content is supplied to the chain as {assetGlossary}.
    :return: The model's reply as a plain string (via StrOutputParser).
    """
    asset = read_file(file)
    llm = AzureChatOpenAI(
        openai_api_version=OPENAI_API_VERSION,
        openai_api_key=OPENAI_API_KEY,
        azure_endpoint=OPENAI_API_BASE,
        openai_api_type=OPENAI_API_TYPE,
        deployment_name=OPENAI_MODEL,  # name of the Azure deployment to call
        temperature=temp,
        max_tokens=tokens,
    )
    # Fixed prompt typo: "Converstaion" -> "Conversation".
    userMessage = """## Conversation to date: {conversationToDate}
## Create Optimised Query
"""
    system_message_template = SystemMessagePromptTemplate.from_template(systemMessgae)
    human_message_template = HumanMessagePromptTemplate.from_template(userMessage)
    # Combine system and human messages into a single chat prompt.
    prompt = ChatPromptTemplate.from_messages([system_message_template, human_message_template])
    chain = prompt | llm | StrOutputParser()
    return chain.invoke({
        "assetGlossary": asset,
        "conversationToDate": history,
    })
def get_response(chat_history, qte, knowledge, temp2, tokens2, persona2SystemMessage, persona2UserMessage, file):
    """
    Stream a persona response from the Azure-hosted chat model.

    :param chat_history: Prior conversation, injected as {chatHistory}.
    :param qte: Optimised query, injected as {query}.
    :param knowledge: Retrieved knowledge text, injected as {knowledge}.
    :param temp2: Sampling temperature for the model.
    :param tokens2: max_tokens limit for the completion.
    :param persona2SystemMessage: System-prompt template string.
    :param persona2UserMessage: Human-message template passed straight to
        ChatPromptTemplate.from_messages.
    :param file: Base name of the markdown asset supplied as {assetGlossary}.
    :return: The stream iterator produced by chain.stream (string chunks
        via StrOutputParser).
    """
    glossary = read_file(file)
    model = AzureChatOpenAI(
        openai_api_version=OPENAI_API_VERSION,
        openai_api_key=OPENAI_API_KEY,
        azure_endpoint=OPENAI_API_BASE,
        openai_api_type=OPENAI_API_TYPE,
        deployment_name=OPENAI_MODEL,  # name of the Azure deployment to call
        temperature=temp2,
        max_tokens=tokens2,
    )
    sys_template = SystemMessagePromptTemplate.from_template(persona2SystemMessage)
    # Build the chat prompt from the system template plus the caller-supplied
    # human message, then pipe through the model and a string parser.
    chat_prompt = ChatPromptTemplate.from_messages([sys_template, persona2UserMessage])
    pipeline = chat_prompt | model | StrOutputParser()
    payload = {
        "assetGlossary": glossary,
        "query": qte,
        "chatHistory": chat_history,
        "knowledge": knowledge,
    }
    return pipeline.stream(payload)