galuhalifani committed on
Commit
b15cb21
·
1 Parent(s): 1889749

add usage loggers

Browse files
src/handler.py CHANGED
@@ -130,13 +130,14 @@ def translate_text(lang, text):
130
  except Exception as e:
131
  return text
132
 
133
- def add_question_ticker():
134
  try:
135
  web_counter_collection.update_one(
136
  {"type": "web"},
137
  {
138
  "$inc": {"counter": +1},
139
- "$set": {"timestamp": datetime.now(timezone.utc)}
 
140
  },
141
  upsert=True
142
  )
 
130
  except Exception as e:
131
  return text
132
 
133
+ def add_question_ticker(query=None):
134
  try:
135
  web_counter_collection.update_one(
136
  {"type": "web"},
137
  {
138
  "$inc": {"counter": +1},
139
+ "$set": {"timestamp": datetime.now(timezone.utc),
140
+          "query": query}
141
  },
142
  upsert=True
143
  )
src/model.py CHANGED
@@ -8,7 +8,7 @@ import os
8
  import re
9
  from prompt import PROFESSIONAL_PROMPT
10
  from langchain.chains import ConversationalRetrievalChain
11
- from langchain.memory import ConversationBufferMemory
12
  from datetime import datetime, timezone
13
  from handler import is_feedback_message, extract_feedback_content, OPENAI_KEY, collection, store_last_qna, add_question_ticker
14
 
@@ -20,14 +20,15 @@ embeddings = OpenAIEmbeddings(
20
  )
21
 
22
  def init_memory():
23
- return ConversationBufferMemory(memory_key="chat_history", return_messages=True, output_key='answer')
24
 
25
  memory_store = {}
26
 
27
- def get_user_memory(user_id: str) -> ConversationBufferMemory:
28
  if user_id not in memory_store:
29
- memory_store[user_id] = ConversationBufferMemory(
30
  memory_key="chat_history",
 
31
  return_messages=True,
32
  output_key="answer"
33
  )
@@ -53,8 +54,9 @@ llm = ChatOpenAI(
53
  qa_chains = {}
54
 
55
  def create_conversational_chain(user_id: None):
56
- memory = get_user_memory(user_id) if user_id else init_memory()
57
 
 
58
  retriever = vector_store.as_retriever(
59
  search_type="similarity",
60
  search_kwargs={"k": 3, "score_threshold": 0.8}
@@ -78,13 +80,12 @@ def clean_answer(raw_answer):
78
 
79
  def ask(query, user_id="anonymous"):
80
  try:
81
- if user_id != "anonymous":
82
- qa = create_conversational_chain(user_id)
83
- else:
84
- qa = create_conversational_chain("anonymous")
85
-
86
  result = qa({"question": query})
87
- add_question_ticker()
 
 
88
 
89
  if not result['source_documents']:
90
  return f"Sorry, no relevant information found on the question asked. Please contact immigration customer service through https://www.imigrasi.go.id/hubungi."
 
8
  import re
9
  from prompt import PROFESSIONAL_PROMPT
10
  from langchain.chains import ConversationalRetrievalChain
11
+ from langchain.memory import ConversationBufferMemory, ConversationBufferWindowMemory
12
  from datetime import datetime, timezone
13
  from handler import is_feedback_message, extract_feedback_content, OPENAI_KEY, collection, store_last_qna, add_question_ticker
14
 
 
20
  )
21
 
22
  def init_memory():
23
+ return ConversationBufferWindowMemory(memory_key="chat_history", k=3, return_messages=True, output_key='answer')
24
 
25
  memory_store = {}
26
 
27
+ def get_user_memory(user_id: str) -> ConversationBufferWindowMemory:
28
  if user_id not in memory_store:
29
+ memory_store[user_id] = ConversationBufferWindowMemory(
30
  memory_key="chat_history",
31
+ k=3,
32
  return_messages=True,
33
  output_key="answer"
34
  )
 
54
  qa_chains = {}
55
 
56
  def create_conversational_chain(user_id: None):
57
+ memory = init_memory()
58
 
59
+ print(f"🧠 Total memory messages: {len(memory.chat_memory.messages)}")
60
  retriever = vector_store.as_retriever(
61
  search_type="similarity",
62
  search_kwargs={"k": 3, "score_threshold": 0.8}
 
80
 
81
  def ask(query, user_id="anonymous"):
82
  try:
83
+ qa = create_conversational_chain("anonymous")
84
+ add_question_ticker(query)
 
 
 
85
  result = qa({"question": query})
86
+
87
+ usage = result.get('__raw', {}).get('usage', {})
88
+ print(f"πŸ”πŸ”πŸ”πŸ”πŸ”πŸ” Tokens used: {usage}")
89
 
90
  if not result['source_documents']:
91
  return f"Sorry, no relevant information found on the question asked. Please contact immigration customer service through https://www.imigrasi.go.id/hubungi."
whatsapp_flask/Dockerfile DELETED
@@ -1,12 +0,0 @@
1
- FROM python:3.9-slim
2
-
3
- WORKDIR /app
4
-
5
- COPY . .
6
-
7
- RUN pip install --no-cache-dir -r requirements.txt
8
-
9
- ENV PORT=5000
10
- EXPOSE 5000
11
-
12
- CMD ["python", "app.py"]
 
 
 
 
 
 
 
 
 
 
 
 
 
whatsapp_flask/app.py DELETED
@@ -1,34 +0,0 @@
1
- from flask import Flask, request, jsonify
2
- from model import ask, check_question_feedback
3
- from handler import save_feedback
4
-
5
- app = Flask(__name__)
6
-
7
- @app.route("/")
8
- def root():
9
- return "Instant Immigration Bot API is running."
10
-
11
- @app.route("/ask", methods=["POST"])
12
- def handle_question():
13
- data = request.json
14
- query = data.get("query")
15
- user_id = data.get("user_id", "anonymous")
16
-
17
- if not query:
18
- return jsonify({"error": "No query provided"}), 400
19
-
20
- result = check_question_feedback(query, user_id)
21
-
22
- if result["is_feedback"]:
23
- last_qna = result["last_qna"]
24
- if not last_qna["question"]:
25
- return jsonify({"message": "Sorry, please ask a question first before providing feedback."}), 400
26
-
27
- save_result = save_feedback(result["feedback_obj"], last_qna)
28
- return jsonify({"message": save_result}), 200
29
-
30
- answer = ask(query, user_id)
31
- return jsonify({"answer": answer})
32
-
33
- if __name__ == "__main__":
34
- app.run(host="0.0.0.0", port=5000)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
whatsapp_flask/requirements.txt DELETED
@@ -1,11 +0,0 @@
1
- huggingface_hub==0.25.2
2
- streamlit
3
- streamlit-option-menu
4
- pymongo
5
- langchain
6
- langchain-openai
7
- langchain-mongodb
8
- python-dotenv
9
- openai
10
- datetime
11
- flask