SHAMIL SHAHBAZ AWAN committed on
Commit
2f92c73
·
verified ·
1 Parent(s): 4ff6dd8

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +20 -18
app.py CHANGED
@@ -2,11 +2,7 @@ import streamlit as st
2
  import pandas as pd
3
  import matplotlib.pyplot as plt
4
  import seaborn as sns
5
-
6
# Import GroqModel, degrading gracefully when groqflow is not installed.
# NOTE: without the fallback binding, a failed import leaves the name
# `GroqModel` undefined and every later reference raises NameError instead
# of the intended in-app error message.
try:
    from groqflow.groqmodel import GroqModel
except ImportError as e:
    GroqModel = None  # sentinel so downstream code can detect the failure
    st.error(f"Failed to import GroqModel. Please ensure all dependencies are installed correctly. Error: {e}")
10
 
11
  # Configure page
12
  st.set_page_config(page_title="Data Augmentation App", layout="wide")
@@ -29,14 +25,20 @@ st.title("Data Augmentation and Analysis App")
29
  st.sidebar.title("Upload Your File")
30
  st.sidebar.markdown("Supported formats: CSV, Excel")
31
 
32
# Load Groq API key from secrets and build the model client.
# `groq_model` is pre-bound to None so that failure paths (missing key,
# construction error) leave a testable sentinel instead of an unbound name —
# otherwise the later `if not groq_model` check raises NameError.
groq_model = None
try:
    groq_api_key = st.secrets["HUGGINGFACE_KEY"]
    groq_model = GroqModel("llama3-8b-8192", api_key=groq_api_key)
except KeyError:
    st.error("API key not found in secrets. Please configure your `HUGGINGFACE_KEY` in Streamlit secrets.")
except Exception as e:
    st.error(f"Error initializing GroqModel: {e}")
 
 
 
 
 
 
40
 
41
  def load_file(uploaded_file):
42
  """Load the uploaded file."""
@@ -65,14 +67,14 @@ def generate_graph(data, query):
65
  st.error(f"Error generating graph: {e}")
66
 
67
def handle_query(data, query):
    """Handle user query using Groq API.

    Serializes the whole DataFrame into the prompt and shows the model's
    answer in the Streamlit page; all failures are reported via st.error.

    Args:
        data: pandas DataFrame with the uploaded dataset.
        query: free-text question from the user.
    """
    try:
        # `globals().get` avoids a NameError when model initialization
        # failed earlier and `groq_model` was never bound at module level.
        if not globals().get("groq_model"):
            st.error("GroqModel is not initialized. Check for errors in setup.")
            return  # guard: nothing to query without a model
        # NOTE(review): embedding the entire dataset in the prompt can blow
        # the context window for large files — consider sampling rows.
        prompt = f"Given the dataset: {data.to_dict(orient='records')}, answer the following: {query}"
        response = groq_model.generate(prompt)
        st.write("Response:", response)
    except Exception as e:
        st.error(f"Error in LLM processing: {e}")
78
 
 
2
  import pandas as pd
3
  import matplotlib.pyplot as plt
4
  import seaborn as sns
5
+ from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
 
 
 
 
6
 
7
  # Configure page
8
  st.set_page_config(page_title="Data Augmentation App", layout="wide")
 
25
  st.sidebar.title("Upload Your File")
26
  st.sidebar.markdown("Supported formats: CSV, Excel")
27
 
28
# Get the Hugging Face API key from secrets
hf_api_key = st.secrets.get("HUGGINGFACE_KEY")
# Pre-bind the pipeline so failure paths leave a None sentinel instead of an
# unbound name (handle_query later tests `llm_pipeline`).
llm_pipeline = None
if not hf_api_key:
    st.error("Hugging Face API key not found in secrets.")
else:
    # Initialize the model and tokenizer using the API key
    try:
        # NOTE(review): "llama3-70b-8192" is a Groq model id, not a Hugging
        # Face repo id (e.g. "meta-llama/Meta-Llama-3-70B") — confirm the
        # intended model; loading a 70B checkpoint in-process is also likely
        # impractical for a Streamlit app.
        model_name = "llama3-70b-8192"  # Replace with the correct model name if needed
        # `token=` replaces the deprecated `use_auth_token=` argument in
        # recent transformers releases.
        model = AutoModelForCausalLM.from_pretrained(model_name, token=hf_api_key)
        tokenizer = AutoTokenizer.from_pretrained(model_name, token=hf_api_key)
        llm_pipeline = pipeline("text-generation", model=model, tokenizer=tokenizer)
        st.success(f"Model {model_name} initialized successfully!")
    except Exception as e:
        st.error(f"Error initializing model: {e}")
42
 
43
  def load_file(uploaded_file):
44
  """Load the uploaded file."""
 
67
  st.error(f"Error generating graph: {e}")
68
 
69
def handle_query(data, query):
    """Handle user query using the LLM.

    Builds a prompt from the uploaded dataset plus the user's question, runs
    the text-generation pipeline, and renders the answer; all failures are
    reported via st.error.

    Args:
        data: pandas DataFrame with the uploaded dataset.
        query: free-text question from the user.
    """
    try:
        # `globals().get` avoids a NameError when pipeline initialization
        # failed earlier and `llm_pipeline` was never bound at module level.
        if not globals().get("llm_pipeline"):
            st.error("LLM pipeline is not initialized. Check for errors in setup.")
            return  # guard: nothing to answer without a pipeline
        # NOTE(review): embedding the entire dataset in the prompt can exceed
        # the model's context window for large files — consider sampling rows.
        prompt = f"Given the dataset: {data.to_dict(orient='records')}, answer the following: {query}"
        response = llm_pipeline(prompt, max_length=200, num_return_sequences=1)
        st.write("Response:", response[0]['generated_text'])
    except Exception as e:
        st.error(f"Error in LLM processing: {e}")
80