AashitaK committed on
Commit
48c165d
·
verified ·
1 Parent(s): 127a9af

Update chatbot.py

Browse files
Files changed (1) hide show
  1. chatbot.py +9 -6
chatbot.py CHANGED
@@ -10,13 +10,16 @@ META_PROMPT = load_file("meta_prompt.txt")
10
  def construct_prompt(query: str, context_embeddings: dict, df: pd.DataFrame) -> tuple[str, str]:
11
  """
12
  Constructs a prompt for the language model based on the most relevant service description.
 
13
  This function identifies the most relevant service by comparing the query with precomputed
14
  document embeddings. It then formats the prompt to include an introduction, the service
15
  description as context, and the user's question.
 
16
  Parameters:
17
  query (str): The user's input question.
18
  context_embeddings (dict): A dictionary mapping service identifiers to their embeddings.
19
  df (pd.DataFrame): A DataFrame containing service descriptions and links.
 
20
  Returns:
21
  tuple[str, str]: A tuple containing the formatted prompt and the associated service link.
22
  """
@@ -49,14 +52,17 @@ def answer_query_with_context(
49
  ) -> str:
50
  """
51
  Generates a response to a user's query using the most relevant service description.
 
52
  This function constructs a prompt by attaching the most relevant service description to the user's query,
53
  sends this prompt to the language model to generate a response, and appends additional service-related
54
  information to this response.
 
55
  Parameters:
56
  query (str): The user's input question.
57
  df (pd.DataFrame): A DataFrame containing service descriptions and links.
58
  document_embeddings (dict): A dictionary mapping service identifiers to their embeddings.
59
  show_prompt (bool, optional): If True, displays the constructed prompt (for debugging). Defaults to False.
 
60
  Returns:
61
  str: The final response from the chatbot, including the generated answer and additional service details.
62
  """
@@ -68,7 +74,7 @@ def answer_query_with_context(
68
  {"role": "system", "content": META_PROMPT},
69
  {"role": "user", "content": prompt}
70
  ]
71
- response = get_response(message=prompt, system_message=META_PROMPT)
72
 
73
  # Append additional service-related information
74
  end_message = (
@@ -78,10 +84,7 @@ def answer_query_with_context(
78
  "Helpdesk representatives are also available for a remote chat session during normal hours (Monday - Friday, "
79
  "8:00 AM - 5:00 PM PST) via https://helpdesk.hmc.edu"
80
  )
81
- response_str = ""
82
- for element in response:
83
- response_str += element
84
- reply = response_str + end_message
85
 
86
  return reply
87
-
 
10
  def construct_prompt(query: str, context_embeddings: dict, df: pd.DataFrame) -> tuple[str, str]:
11
  """
12
  Constructs a prompt for the language model based on the most relevant service description.
13
+
14
  This function identifies the most relevant service by comparing the query with precomputed
15
  document embeddings. It then formats the prompt to include an introduction, the service
16
  description as context, and the user's question.
17
+
18
  Parameters:
19
  query (str): The user's input question.
20
  context_embeddings (dict): A dictionary mapping service identifiers to their embeddings.
21
  df (pd.DataFrame): A DataFrame containing service descriptions and links.
22
+
23
  Returns:
24
  tuple[str, str]: A tuple containing the formatted prompt and the associated service link.
25
  """
 
52
  ) -> str:
53
  """
54
  Generates a response to a user's query using the most relevant service description.
55
+
56
  This function constructs a prompt by attaching the most relevant service description to the user's query,
57
  sends this prompt to the language model to generate a response, and appends additional service-related
58
  information to this response.
59
+
60
  Parameters:
61
  query (str): The user's input question.
62
  df (pd.DataFrame): A DataFrame containing service descriptions and links.
63
  document_embeddings (dict): A dictionary mapping service identifiers to their embeddings.
64
  show_prompt (bool, optional): If True, displays the constructed prompt (for debugging). Defaults to False.
65
+
66
  Returns:
67
  str: The final response from the chatbot, including the generated answer and additional service details.
68
  """
 
74
  {"role": "system", "content": META_PROMPT},
75
  {"role": "user", "content": prompt}
76
  ]
77
+ response = get_response(messages)
78
 
79
  # Append additional service-related information
80
  end_message = (
 
84
  "Helpdesk representatives are also available for a remote chat session during normal hours (Monday - Friday, "
85
  "8:00 AM - 5:00 PM PST) via https://helpdesk.hmc.edu"
86
  )
87
+
88
+ reply = response + end_message
 
 
89
 
90
  return reply