ivxivx committed on
Commit
f9e55e7
·
unverified ·
1 Parent(s): ee304fe

chore: dep

Browse files
Files changed (2) hide show
  1. app.py +6 -1
  2. pyproject.toml → requirements.txt +7 -4
app.py CHANGED
@@ -59,8 +59,13 @@ examples = [
59
  ]
60
 
61
  def predict(message, history):
 
 
62
  if not history or history[0].get("role") != "system":
63
- history = [{"role": "system", "content": system_prompt}] + history
 
 
 
64
  history.append({"role": "user", "content": message})
65
 
66
  # 1. Build prompt from history using chat template
 
59
  ]
60
 
61
  def predict(message, history):
62
+ # Always inject the user message into the system prompt's {input} placeholder
63
+ sys_prompt = system_prompt.replace("{input}", message)
64
  if not history or history[0].get("role") != "system":
65
+ history = [{"role": "system", "content": sys_prompt}] + history
66
+ else:
67
+ history[0]["content"] = sys_prompt
68
+
69
  history.append({"role": "user", "content": message})
70
 
71
  # 1. Build prompt from history using chat template
pyproject.toml → requirements.txt RENAMED
@@ -1,13 +1,16 @@
1
  [project]
2
- name = "hf-customer-service-chatbot"
3
  version = "0.1.0"
4
- description = "A customer service chatbot using Hugging Face Transformers and Gradio"
5
  requires-python = "==3.11.*"
6
 
7
  dependencies = [
8
  "python-dotenv>=1.0.1",
9
- "huggingface-hub>=0.27.0",
10
  "accelerate>=1.3.0",
 
 
11
  "transformers==4.49.0",
12
- "gradio>=5.23.0"
 
 
13
  ]
 
1
  [project]
2
+ name = "ai-customer-service-chatbot"
3
  version = "0.1.0"
4
+ description = "A customer service chatbot using Hugging Face, LangChain, Gradio, and Streamlit"
5
  requires-python = "==3.11.*"
6
 
7
  dependencies = [
8
  "python-dotenv>=1.0.1",
 
9
  "accelerate>=1.3.0",
10
+ "faiss-cpu>=1.10.0",
11
+ "safetensors>=0.4.5",
12
  "transformers==4.49.0",
13
+ "gradio>=5.23.0",
14
+ "datasets",
15
+ "bitsandbytes"
16
  ]