KitHung committed on
Commit
05ecba8
·
1 Parent(s): 72ef9d7

Update: v1.4

Browse files
Files changed (1) hide show
  1. app.py +95 -50
app.py CHANGED
@@ -1,79 +1,124 @@
1
  import streamlit as st
 
2
  from llama_index.core import VectorStoreIndex, SimpleDirectoryReader, Settings
3
  from llama_index.embeddings.huggingface import HuggingFaceEmbedding
4
  from llama_index.legacy.callbacks import CallbackManager
5
  from llama_index.llms.openai_like import OpenAILike
 
6
 
7
- # Create an instance of CallbackManager
8
- callback_manager = CallbackManager()
 
9
 
10
- api_base_url = "https://api.siliconflow.cn/v1"
11
- model = "internlm/internlm2_5-7b-chat"
12
- api_key = st.secrets["API_KEY"]
 
 
 
13
 
14
- llm =OpenAILike(model=model, api_base=api_base_url, api_key=api_key, is_chat_model=True, callback_manager=callback_manager)
 
15
 
16
-
17
-
18
- st.set_page_config(page_title="ai_assistant_demo", page_icon="😄")
19
- st.title("AI Assistant Demo")
20
-
21
- # 初始化模型
22
- @st.cache_resource
23
  def init_models():
24
- embed_model = HuggingFaceEmbedding(
25
- model_name="/home/user/model/paraphrase-multilingual-MiniLM-L12-v2"
26
- )
27
- Settings.embed_model = embed_model
28
-
29
- #用初始化llm
30
- Settings.llm = llm
31
-
32
- documents = SimpleDirectoryReader("/home/user/data").load_data()
33
- index = VectorStoreIndex.from_documents(documents)
34
- query_engine = index.as_query_engine()
35
-
36
- return query_engine
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
37
 
38
  # 检查是否需要初始化模型
39
  if 'query_engine' not in st.session_state:
40
  st.session_state['query_engine'] = init_models()
41
 
42
- def greet2(question):
43
- response = st.session_state['query_engine'].query(question)
44
- return response
45
-
46
-
47
- # Store LLM generated responses
48
- if "messages" not in st.session_state.keys():
49
- st.session_state.messages = [{"role": "assistant", "content": "你好,我是你的助手,有什么我可以帮助你的吗?"}]
 
 
 
 
 
 
 
50
 
51
- # Display or clear chat messages
52
  for message in st.session_state.messages:
53
  with st.chat_message(message["role"]):
54
  st.write(message["content"])
55
 
 
56
  def clear_chat_history():
57
  st.session_state.messages = [{"role": "assistant", "content": "你好,我是你的助手,有什么我可以帮助你的吗?"}]
58
 
59
- st.sidebar.button('Clear Chat History', on_click=clear_chat_history)
60
-
61
- # Function for generating LLaMA2 response
62
- def generate_llama_index_response(prompt_input):
63
- return greet2(prompt_input)
64
 
65
- # User-provided prompt
66
  if prompt := st.chat_input():
67
  st.session_state.messages.append({"role": "user", "content": prompt})
68
  with st.chat_message("user"):
69
  st.write(prompt)
70
 
71
- # Gegenerate_llama_index_response last message is not from assistant
72
- if st.session_state.messages[-1]["role"] != "assistant":
73
- with st.chat_message("assistant"):
74
- with st.spinner("Thinking..."):
75
- response = generate_llama_index_response(prompt)
76
- placeholder = st.empty()
77
- placeholder.markdown(response)
78
- message = {"role": "assistant", "content": response.response}
79
- st.session_state.messages.append(message)
 
1
  import streamlit as st
2
+ import logging
3
  from llama_index.core import VectorStoreIndex, SimpleDirectoryReader, Settings
4
  from llama_index.embeddings.huggingface import HuggingFaceEmbedding
5
  from llama_index.legacy.callbacks import CallbackManager
6
  from llama_index.llms.openai_like import OpenAILike
7
+ import os
8
 
9
# Configure module-level logging so initialization progress shows up in the app logs.
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

# Page configuration.
# NOTE: st.set_page_config must be the first Streamlit command executed on the page.
st.set_page_config(
    page_title="AI Assistant Demo",
    page_icon="😄",
    layout="wide"  # use the wide layout
)

# Placeholder element; init_models() writes loading-status text here and clears it when done.
status_placeholder = st.empty()
22
 
 
 
 
 
 
 
 
23
def init_models():
    """Initialize the LLM, embedding model, and RAG query engine.

    Reads the API key from the ``API_KEY`` environment variable, configures
    the global llama-index ``Settings`` (LLM + embedding model), builds a
    vector index over the documents in ``/home/user/data``, and returns a
    query engine over that index. Progress is logged and mirrored into
    ``status_placeholder`` in the UI.

    Returns:
        A llama-index query engine ready to answer questions.

    Raises:
        ValueError: if the ``API_KEY`` environment variable is not set.
        Exception: any other initialization error is logged (with traceback),
            shown in the UI via ``st.error``, and re-raised.
    """
    try:
        logger.info("Starting model initialization...")
        status_placeholder.text("正在初始化模型...")

        # API settings — fail fast if the key is missing.
        api_key = os.getenv("API_KEY")
        if not api_key:
            raise ValueError("API_KEY environment variable is not set")

        api_base_url = "https://api.siliconflow.cn/v1"
        model = "internlm/internlm2_5-7b-chat"

        logger.info("Initializing callback manager...")
        callback_manager = CallbackManager()

        logger.info("Initializing LLM...")
        llm = OpenAILike(
            model=model,
            api_base=api_base_url,
            api_key=api_key,
            is_chat_model=True,
            callback_manager=callback_manager,
        )
        Settings.llm = llm

        logger.info("Initializing embedding model...")
        # Local model path — presumably pre-downloaded into the image; verify on deploy.
        embed_model = HuggingFaceEmbedding(
            model_name="/home/user/model/paraphrase-multilingual-MiniLM-L12-v2"
        )
        Settings.embed_model = embed_model

        logger.info("Loading documents...")
        documents = SimpleDirectoryReader("/home/user/data").load_data()

        logger.info("Creating vector store index...")
        index = VectorStoreIndex.from_documents(documents)

        logger.info("Creating query engine...")
        query_engine = index.as_query_engine()

        logger.info("Model initialization completed successfully!")
        status_placeholder.empty()
        return query_engine

    except Exception as e:
        # logger.exception preserves the traceback, which plain .error() dropped.
        error_msg = f"Error during initialization: {str(e)}"
        logger.exception(error_msg)
        st.error(error_msg)
        raise
73
+
74
# Page title with a horizontal rule separating it from the chat area.
st.title("AI Assistant Demo")
st.markdown("---")

# Build the query engine once per session; st.session_state survives Streamlit reruns.
if 'query_engine' not in st.session_state:
    st.session_state['query_engine'] = init_models()
81
 
82
def generate_response(question):
    """Query the session's RAG engine and return its response.

    Args:
        question: the user's question string.

    Returns:
        The llama-index response object, or ``None`` if the query failed
        (the error is logged with traceback and shown in the UI).
    """
    try:
        # Lazy %-formatting: the message is only built if INFO logging is enabled.
        logger.info("Generating response for question: %s", question)
        response = st.session_state['query_engine'].query(question)
        logger.info("Response generated successfully")
        return response
    except Exception as e:
        error_msg = f"Error generating response: {str(e)}"
        # logger.exception preserves the traceback, which plain .error() dropped.
        logger.exception(error_msg)
        st.error(error_msg)
        return None
93
+
94
# Seed the conversation with a greeting the first time this session runs.
if "messages" not in st.session_state:
    st.session_state.messages = [{"role": "assistant", "content": "你好,我是你的助手,有什么我可以帮助你的吗?"}]

# Replay the stored conversation so it persists across Streamlit reruns.
for entry in st.session_state.messages:
    role, text = entry["role"], entry["content"]
    with st.chat_message(role):
        st.write(text)
102
 
103
# Chat-history reset callback (wired to the sidebar button below).
def clear_chat_history():
    """Reset the conversation to the initial assistant greeting."""
    st.session_state.messages = [{"role": "assistant", "content": "你好,我是你的助手,有什么我可以帮助你的吗?"}]
106
 
107
# Sidebar control for wiping the conversation.
st.sidebar.button('清除聊天历史', on_click=clear_chat_history)

# Capture the next user message, record it, and echo it in the chat area.
if user_text := st.chat_input():
    st.session_state.messages.append({"role": "user", "content": user_text})
    with st.chat_message("user"):
        st.write(user_text)

# If the user spoke last, produce the assistant's reply.
if st.session_state.messages[-1]["role"] != "assistant":
    with st.chat_message("assistant"):
        with st.spinner("思考中..."):
            reply = generate_response(user_text)
            if reply:
                # Render the response object; persist its text (.response) in history.
                st.markdown(reply)
                st.session_state.messages.append(
                    {"role": "assistant", "content": reply.response}
                )