davidfearne committed on
Commit
fa02206
·
verified ·
1 Parent(s): 93467f0

Update azure_openai.py

Browse files
Files changed (1) hide show
  1. azure_openai.py +50 -26
azure_openai.py CHANGED
@@ -1,16 +1,14 @@
1
  # openai
2
- import os
3
  import streamlit as st
4
  # from langchain.chat_models import AzureChatOpenAI
5
  from langchain_openai import AzureChatOpenAI
6
  from langchain_core.output_parsers import StrOutputParser
7
  from langchain_core.prompts.chat import SystemMessagePromptTemplate, HumanMessagePromptTemplate, ChatPromptTemplate
8
 
9
- # Initialize an instance of AzureOpenAI using the specified settings
10
-
11
 
12
  # LLM Langchain Definition
13
- OPENAI_API_KEY = st.secrets['azure_api_key']
 
14
  OPENAI_API_TYPE = "azure"
15
  OPENAI_API_BASE = "https://davidfearn-gpt4.openai.azure.com"
16
  # OPENAI_API_VERSION = "2024-02-01"
@@ -19,9 +17,26 @@ OPENAI_API_VERSION = "2024-08-01-preview"
19
  OPENAI_MODEL = "gpt-4o"
20
  # Initialize an instance of AzureOpenAI using the specified settings
21
 
 
 
 
22
 
23
- def get_res(user_query, history, knowledge, systemMessgae, userMessage, temp, tokens):
 
 
 
 
 
 
 
 
 
 
 
24
 
 
 
 
25
  llm = AzureChatOpenAI(
26
  openai_api_version=OPENAI_API_VERSION,
27
  openai_api_key=OPENAI_API_KEY,
@@ -31,6 +46,7 @@ def get_res(user_query, history, knowledge, systemMessgae, userMessage, temp, to
31
  temperature=temp,
32
  max_tokens=tokens # Name of the deployment for identification
33
  )
 
34
 
35
  system_message_template = SystemMessagePromptTemplate.from_template(systemMessgae)
36
  human_message_template = HumanMessagePromptTemplate.from_template(userMessage)
@@ -40,33 +56,41 @@ def get_res(user_query, history, knowledge, systemMessgae, userMessage, temp, to
40
 
41
  chain = prompt | llm | StrOutputParser()
42
 
43
- return chain.stream({
44
- "user_question": user_query,
45
- "history": history,
46
- "knowledge": knowledge
47
  })
48
 
49
- def qt(systemMessgae, history, temp, tokens):
50
 
 
 
51
  llm = AzureChatOpenAI(
52
- openai_api_version=OPENAI_API_VERSION,
53
- openai_api_key=OPENAI_API_KEY,
54
- azure_endpoint=OPENAI_API_BASE,
55
- openai_api_type=OPENAI_API_TYPE,
56
- deployment_name=OPENAI_MODEL,
57
- temperature=temp,
58
- max_tokens=tokens # Name of the deployment for identification
59
- )
60
- userMessage = """ Converstaion to date: {history}
61
- """
62
- system_message_template = SystemMessagePromptTemplate.from_template(systemMessgae)
63
- human_message_template = HumanMessagePromptTemplate.from_template(userMessage)
 
64
 
65
  # Create a chat prompt template combining system and human messages
66
- prompt = ChatPromptTemplate.from_messages([system_message_template, human_message_template])
67
 
68
  chain = prompt | llm | StrOutputParser()
69
 
70
- return chain.invoke({
71
- "history": history,
72
- })
 
 
 
 
 
 
 
 
1
  # openai
 
2
  import streamlit as st
3
  # from langchain.chat_models import AzureChatOpenAI
4
  from langchain_openai import AzureChatOpenAI
5
  from langchain_core.output_parsers import StrOutputParser
6
  from langchain_core.prompts.chat import SystemMessagePromptTemplate, HumanMessagePromptTemplate, ChatPromptTemplate
7
 
 
 
8
 
9
  # LLM Langchain Definition
10
+ # OPENAI_API_KEY = st.secrets['azure_api_key']
11
+ OPENAI_API_KEY = "86b631a9c0294e9698e327c59ff5ac2c"
12
  OPENAI_API_TYPE = "azure"
13
  OPENAI_API_BASE = "https://davidfearn-gpt4.openai.azure.com"
14
  # OPENAI_API_VERSION = "2024-02-01"
 
17
  OPENAI_MODEL = "gpt-4o"
18
  # Initialize an instance of AzureOpenAI using the specified settings
19
 
20
+ def read_file(file):
21
+ """
22
+ Reads the content of a text file and returns it as a string.
23
 
24
+ :param approver: The type of approver.
25
+ :return: The content of the file as a string.
26
+ """
27
+ fp = f"assets/{file}.md"
28
+ try:
29
+ with open(fp, 'r', encoding='utf-8') as file:
30
+ content = file.read()
31
+ return content
32
+ except FileNotFoundError:
33
+ print(f"The file at {fp} was not found.")
34
+ except IOError:
35
+ print(f"An error occurred while reading the file at {fp}.")
36
 
37
+
38
+ def qt(systemMessgae, history, temp, tokens, file):
39
+ asset = read_file(file)
40
  llm = AzureChatOpenAI(
41
  openai_api_version=OPENAI_API_VERSION,
42
  openai_api_key=OPENAI_API_KEY,
 
46
  temperature=temp,
47
  max_tokens=tokens # Name of the deployment for identification
48
  )
49
+ userMessage = """{conversationToDate}"""
50
 
51
  system_message_template = SystemMessagePromptTemplate.from_template(systemMessgae)
52
  human_message_template = HumanMessagePromptTemplate.from_template(userMessage)
 
56
 
57
  chain = prompt | llm | StrOutputParser()
58
 
59
+ return chain.invoke({
60
+ "assetGlossary": asset,
61
+ "conversationToDate": history
 
62
  })
63
 
 
64
 
65
+ def get_response(chat_history, qte, knowledge, temp1, temp2, tokens1, tokens2, persona2SystemMessage, persona2UserMessage, file):
66
+ asset = read_file(file)
67
  llm = AzureChatOpenAI(
68
+ openai_api_version=OPENAI_API_VERSION,
69
+ openai_api_key=OPENAI_API_KEY,
70
+ azure_endpoint=OPENAI_API_BASE,
71
+ openai_api_type=OPENAI_API_TYPE,
72
+ deployment_name=OPENAI_MODEL,
73
+ temperature=temp2,
74
+ max_tokens=tokens2
75
+ # Name of the deployment for identification
76
+ )
77
+
78
+
79
+ system_message_template = SystemMessagePromptTemplate.from_template(persona2SystemMessage)
80
+
81
 
82
  # Create a chat prompt template combining system and human messages
83
+ prompt = ChatPromptTemplate.from_messages([system_message_template, persona2UserMessage])
84
 
85
  chain = prompt | llm | StrOutputParser()
86
 
87
+ print(asset)
88
+
89
+ return chain.stream({
90
+ "assetGlossary": asset,
91
+ "query": chat_history,
92
+ "knowledge": knowledge
93
+ })
94
+
95
+
96
+