admin08077 committed on
Commit
c0195ca
·
verified ·
1 Parent(s): cc25a12

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +6 -10
app.py CHANGED
@@ -3,7 +3,6 @@ from huggingface_hub import InferenceClient
3
  import nltk
4
  import PyPDF2
5
 
6
- # Download required NLTK resources
7
  nltk.download("punkt", quiet=True)
8
  nltk.download("punkt_tab", quiet=True)
9
 
@@ -14,12 +13,7 @@ def respond_chunked(message, history, system_message, max_tokens, temperature, t
14
  combined_response = ""
15
  for chunk in chunks:
16
  chunked_system_message = f"{system_message}\n\nFile Content Chunk:\n{chunk}"
17
- messages = [{"role": "system", "content": chunked_system_message}]
18
- for user, assistant in history:
19
- if user:
20
- messages.append({"role": "user", "content": user})
21
- if assistant:
22
- messages.append({"role": "assistant", "content": assistant})
23
  messages.append({"role": "user", "content": message})
24
  try:
25
  completion = client.chat_completion(
@@ -104,9 +98,11 @@ with gr.Blocks() as demo:
104
  def chat_function(user_message, history, file_content, system_prompt, max_tokens, temperature, top_p):
105
  if not user_message.strip():
106
  return "", history
107
- history.append((user_message, ""))
108
  assistant_response = respond_chunked(user_message, history, system_prompt, max_tokens, temperature, top_p, file_content)
109
- history[-1] = (user_message, assistant_response)
 
 
110
  return "", history
111
 
112
  send_button = gr.Button("Send")
@@ -122,4 +118,4 @@ with gr.Blocks() as demo:
122
  outputs=[user_input, chatbot]
123
  )
124
 
125
- demo.launch(server_name="0.0.0.0", server_port=7860)
 
3
  import nltk
4
  import PyPDF2
5
 
 
6
  nltk.download("punkt", quiet=True)
7
  nltk.download("punkt_tab", quiet=True)
8
 
 
13
  combined_response = ""
14
  for chunk in chunks:
15
  chunked_system_message = f"{system_message}\n\nFile Content Chunk:\n{chunk}"
16
+ messages = [{"role": "system", "content": chunked_system_message}] + history
 
 
 
 
 
17
  messages.append({"role": "user", "content": message})
18
  try:
19
  completion = client.chat_completion(
 
98
  def chat_function(user_message, history, file_content, system_prompt, max_tokens, temperature, top_p):
99
  if not user_message.strip():
100
  return "", history
101
+ # Get response from the model with chunking
102
  assistant_response = respond_chunked(user_message, history, system_prompt, max_tokens, temperature, top_p, file_content)
103
+ # Append user and assistant messages in the correct format
104
+ history.append({"role": "user", "content": user_message})
105
+ history.append({"role": "assistant", "content": assistant_response})
106
  return "", history
107
 
108
  send_button = gr.Button("Send")
 
118
  outputs=[user_input, chatbot]
119
  )
120
 
121
+ demo.launch(server_name="0.0.0.0", server_port=7860, share=True)