VyLala committed on
Commit
7d6fd70
·
verified ·
1 Parent(s): 43014a2

Update model.py

Browse files
Files changed (1) hide show
  1. model.py +5 -3
model.py CHANGED
@@ -1121,8 +1121,10 @@ def query_document_info(query_word, alternative_query_word, metadata, master_str
1121
  if run_rag:
1122
  print("try run rag")
1123
  # Determine the phrase for LLM query
1124
- rag_query_phrase = f"'{query_word}'"
1125
- if alternative_query_word_cleaned and alternative_query_word_cleaned != query_word:
 
 
1126
  rag_query_phrase += f" or its alternative word '{alternative_query_word_cleaned}'"
1127
 
1128
  # Construct a more specific semantic query phrase for embedding if structured info is available
@@ -1301,7 +1303,7 @@ def query_document_info(query_word, alternative_query_word, metadata, master_str
1301
  f"The text explicitly states BRU18 in the context of brunei (borneo), indicating the country and a broader geographic region within that country."
1302
  f"The study is published in a journal, implying research on living individuals, hence modern."
1303
  )
1304
-
1305
  if model_ai:
1306
  print("back up to ", model_ai)
1307
  llm_response_text, model_instance = call_llm_api(prompt_for_llm, model=model_ai)
 
1121
  if run_rag:
1122
  print("try run rag")
1123
  # Determine the phrase for LLM query
1124
+ rag_query_phrase = ""
1125
+ if query_word.lower() != "unknown":
1126
+ rag_query_phrase += f"'{query_word}'"
1127
+ if alternative_query_word_cleaned and alternative_query_word_cleaned != query_word and alternative_query_word_cleaned.lower() != "unknown":
1128
  rag_query_phrase += f" or its alternative word '{alternative_query_word_cleaned}'"
1129
 
1130
  # Construct a more specific semantic query phrase for embedding if structured info is available
 
1303
  f"The text explicitly states BRU18 in the context of brunei (borneo), indicating the country and a broader geographic region within that country."
1304
  f"The study is published in a journal, implying research on living individuals, hence modern."
1305
  )
1306
+ print("this is prompt: ", prompt_for_llm)
1307
  if model_ai:
1308
  print("back up to ", model_ai)
1309
  llm_response_text, model_instance = call_llm_api(prompt_for_llm, model=model_ai)