charesz committed on
Commit
f9e9ed3
·
verified ·
1 Parent(s): 24f603d

Update utils.py

Browse files
Files changed (1) hide show
  1. utils.py +11 -11
utils.py CHANGED
@@ -3,19 +3,20 @@ import pandas as pd
3
  import streamlit as st
4
  from huggingface_hub import InferenceClient
5
 
6
- # Initialize Hugging Face Inference client using the secret
7
- hf_token = st.secrets.get("HF_TOKEN")
8
- if not hf_token:
9
- st.error("HF_TOKEN not found in secrets. Please add it.")
10
- st.stop()
11
-
12
- client = InferenceClient(token=hf_token)
13
 
14
  def query_agent_from_csv(file_bytes, user_query, model_repo="mistralai/Mistral-7B-Instruct-v0.3"):
15
  """
16
  Reads a CSV and queries the Hugging Face Mistral model.
17
  Returns the model's answer as string.
18
  """
 
 
19
  try:
20
  # --- Step 1: Load CSV ---
21
  try:
@@ -24,18 +25,17 @@ def query_agent_from_csv(file_bytes, user_query, model_repo="mistralai/Mistral-7
24
  file_bytes.seek(0)
25
  df = pd.read_csv(file_bytes, encoding="latin1")
26
 
27
- # Limit columns to avoid huge inputs
28
  MAX_COLS = 50
29
  if df.shape[1] > MAX_COLS:
30
  df = df.iloc[:, :MAX_COLS]
31
 
32
- # --- Step 2: Summarize dataset for model context ---
33
  summary = f"The dataset has {df.shape[0]} rows and {df.shape[1]} columns.\n"
34
  summary += "Columns: " + ", ".join(df.columns[:10])
35
  if df.shape[1] > 10:
36
  summary += ", ..."
37
 
38
- # --- Step 3: Build messages for chat API ---
39
  messages = [
40
  {"role": "system", "content": (
41
  "You are a professional data analyst. "
@@ -45,7 +45,7 @@ def query_agent_from_csv(file_bytes, user_query, model_repo="mistralai/Mistral-7
45
  {"role": "user", "content": f"Question: {user_query}"}
46
  ]
47
 
48
- # --- Step 4: Query the model ---
49
  response = client.chat_completion(
50
  model=model_repo,
51
  messages=messages,
 
3
  import streamlit as st
4
  from huggingface_hub import InferenceClient
5
 
6
def get_hf_client():
    """Return an ``InferenceClient`` authenticated with the ``HF_TOKEN`` secret.

    Reads the token from Streamlit's secrets store. When the secret is
    absent, surfaces an error in the app UI and halts script execution
    (``st.stop`` raises, so no client is ever returned in that case).
    """
    token = st.secrets.get("HF_TOKEN")
    # Guard clause: bail out of the Streamlit run when no token is configured.
    if token:
        return InferenceClient(token=token)
    st.error("HF_TOKEN not found in Streamlit secrets!")
    st.stop()
 
12
 
13
  def query_agent_from_csv(file_bytes, user_query, model_repo="mistralai/Mistral-7B-Instruct-v0.3"):
14
  """
15
  Reads a CSV and queries the Hugging Face Mistral model.
16
  Returns the model's answer as string.
17
  """
18
+ client = get_hf_client() # Initialize client inside function
19
+
20
  try:
21
  # --- Step 1: Load CSV ---
22
  try:
 
25
  file_bytes.seek(0)
26
  df = pd.read_csv(file_bytes, encoding="latin1")
27
 
 
28
  MAX_COLS = 50
29
  if df.shape[1] > MAX_COLS:
30
  df = df.iloc[:, :MAX_COLS]
31
 
32
+ # --- Step 2: Summarize dataset ---
33
  summary = f"The dataset has {df.shape[0]} rows and {df.shape[1]} columns.\n"
34
  summary += "Columns: " + ", ".join(df.columns[:10])
35
  if df.shape[1] > 10:
36
  summary += ", ..."
37
 
38
+ # --- Step 3: Build messages ---
39
  messages = [
40
  {"role": "system", "content": (
41
  "You are a professional data analyst. "
 
45
  {"role": "user", "content": f"Question: {user_query}"}
46
  ]
47
 
48
+ # --- Step 4: Query model ---
49
  response = client.chat_completion(
50
  model=model_repo,
51
  messages=messages,