Spaces:
Sleeping
Sleeping
Update excel_chat.py
Browse files — excel_chat.py (+21, −7)
excel_chat.py
CHANGED
|
@@ -12,7 +12,7 @@ import zipfile
|
|
| 12 |
|
| 13 |
#users = ['maksG', 'AlmaA', 'YchK']
|
| 14 |
|
| 15 |
-
def ask_llm(query, user_input, client_index, user):
|
| 16 |
messages = [
|
| 17 |
{
|
| 18 |
"role": "system",
|
|
@@ -58,11 +58,21 @@ def ask_llm(query, user_input, client_index, user):
|
|
| 58 |
return response.content[0].text
|
| 59 |
|
| 60 |
elif client_index == "Groq (mixtral)":
|
| 61 |
-
|
| 62 |
-
|
| 63 |
-
|
| 64 |
-
|
| 65 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 66 |
|
| 67 |
else:
|
| 68 |
raise ValueError("Unsupported client index provided")
|
|
@@ -86,6 +96,10 @@ def filter_df(df, column_name, keywords):
|
|
| 86 |
return filtered_df
|
| 87 |
|
| 88 |
def chat_with_mistral(source_cols, dest_col, prompt, excel_file, url, search_col, keywords, client, user):
|
|
|
|
|
|
|
|
|
|
|
|
|
| 89 |
new_prompts, new_keywords, new_user, conf_file_path = update_json(user, prompt, keywords)
|
| 90 |
print(f'xlsxfile = {excel_file}')
|
| 91 |
df = pd.read_excel(excel_file)
|
|
@@ -105,7 +119,7 @@ def chat_with_mistral(source_cols, dest_col, prompt, excel_file, url, search_col
|
|
| 105 |
for index, row in filtred_df.iterrows():
|
| 106 |
concatenated_content = "\n\n".join(f"{column_name}: {str(row[column_name])}" for column_name in source_cols)
|
| 107 |
if not concatenated_content == "\n\n".join(f"{column_name}: nan" for column_name in source_cols):
|
| 108 |
-
llm_answer = ask_llm(prompt[0], concatenated_content, client, user)
|
| 109 |
print(f"{cpt}/{len(filtred_df)}\nQUERY:\n{prompt[0]}\nCONTENT:\n{concatenated_content[:200]}...\n\nANSWER:\n{llm_answer}")
|
| 110 |
df.at[index, dest_col] = llm_answer
|
| 111 |
cpt += 1
|
|
|
|
| 12 |
|
| 13 |
#users = ['maksG', 'AlmaA', 'YchK']
|
| 14 |
|
| 15 |
+
def ask_llm(query, user_input, client_index, user, keys):
|
| 16 |
messages = [
|
| 17 |
{
|
| 18 |
"role": "system",
|
|
|
|
| 58 |
return response.content[0].text
|
| 59 |
|
| 60 |
elif client_index == "Groq (mixtral)":
|
| 61 |
+
try:
|
| 62 |
+
client = Groq(api_key=os.environ[user['api_keys'][keys[0]]])
|
| 63 |
+
chat_completion = client.chat.completions.create(
|
| 64 |
+
messages=messages,
|
| 65 |
+
model='mixtral-8x7b-32768',
|
| 66 |
+
)
|
| 67 |
+
response = chat_completion.choices[0].message.content
|
| 68 |
+
except Exception as e:
|
| 69 |
+
print("Change key")
|
| 70 |
+
if keys[0] == keys[1][0]:
|
| 71 |
+
keys[0] = keys[1][1]
|
| 72 |
+
elif keys[0] == keys[1][1]:
|
| 73 |
+
keys[0] = keys[1][2]
|
| 74 |
+
else:
|
| 75 |
+
keys[0] = keys[1][0]
|
| 76 |
|
| 77 |
else:
|
| 78 |
raise ValueError("Unsupported client index provided")
|
|
|
|
| 96 |
return filtered_df
|
| 97 |
|
| 98 |
def chat_with_mistral(source_cols, dest_col, prompt, excel_file, url, search_col, keywords, client, user):
|
| 99 |
+
# API Keys for Groq :
|
| 100 |
+
KEYS = ['groqkey', 'groqkey2', 'groqkey3']
|
| 101 |
+
GroqKey = KEYS[0]
|
| 102 |
+
|
| 103 |
new_prompts, new_keywords, new_user, conf_file_path = update_json(user, prompt, keywords)
|
| 104 |
print(f'xlsxfile = {excel_file}')
|
| 105 |
df = pd.read_excel(excel_file)
|
|
|
|
| 119 |
for index, row in filtred_df.iterrows():
|
| 120 |
concatenated_content = "\n\n".join(f"{column_name}: {str(row[column_name])}" for column_name in source_cols)
|
| 121 |
if not concatenated_content == "\n\n".join(f"{column_name}: nan" for column_name in source_cols):
|
| 122 |
+
llm_answer = ask_llm(prompt[0], concatenated_content, client, user, [GroqKey, KEYS])
|
| 123 |
print(f"{cpt}/{len(filtred_df)}\nQUERY:\n{prompt[0]}\nCONTENT:\n{concatenated_content[:200]}...\n\nANSWER:\n{llm_answer}")
|
| 124 |
df.at[index, dest_col] = llm_answer
|
| 125 |
cpt += 1
|