Added the LLM classes to load the right LLM based on the selected provider
Browse files- .gitignore +13 -1
- src/__pycache__/main.cpython-312.pyc +0 -0
- src/llms/groq.py +25 -0
- src/llms/openai.py +22 -0
- src/main.py +21 -2
- src/ui/__pycache__/config.cpython-312.pyc +0 -0
- src/ui/__pycache__/load.cpython-312.pyc +0 -0
- src/ui/config.py +5 -5
- src/ui/load.py +3 -3
.gitignore
CHANGED
|
@@ -1 +1,13 @@
|
|
| 1 |
-
venv/
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
venv/
|
| 2 |
+
env.txt/
|
| 3 |
+
__pycache__/
|
| 4 |
+
|
| 5 |
+
# Ignore __pycache__ directories everywhere
|
| 6 |
+
**/__pycache__/
|
| 7 |
+
|
| 8 |
+
# Specific paths to ignore
|
| 9 |
+
llms/__pycache__/
|
| 10 |
+
ui/__pycache__/
|
| 11 |
+
|
| 12 |
+
# Environment files
|
| 13 |
+
env.txt
|
src/__pycache__/main.cpython-312.pyc
CHANGED
|
Binary files a/src/__pycache__/main.cpython-312.pyc and b/src/__pycache__/main.cpython-312.pyc differ
|
|
|
src/llms/groq.py
CHANGED
|
@@ -0,0 +1,25 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import streamlit as st
from langchain_groq import ChatGroq


class GroqLLM:
    """Factory that builds a ChatGroq model from user-supplied UI controls."""

    def __init__(self, user_controls_input):
        # Mapping produced by the Streamlit sidebar.
        # Expected keys: 'API Key' and 'Selected Model'.
        self.user_controls_input = user_controls_input

    def get_llm_model(self):
        """Build and return a ChatGroq instance, or None on bad/missing config.

        Reports problems through st.error instead of raising: a missing
        mapping key or an empty API key yields None.
        """
        try:
            groq_api_key = self.user_controls_input['API Key']
            groq_model = self.user_controls_input['Selected Model']

            if groq_api_key == "":
                st.error('Please enter your Groq API key to proceed.')
                # Bug fix: previously fell through and constructed ChatGroq
                # with an empty key; bail out so the caller's None check works.
                return None

            return ChatGroq(model=groq_model, api_key=groq_api_key)

        except KeyError as e:
            st.error(f"Missing configuration: {e}")
            return None
src/llms/openai.py
ADDED
|
@@ -0,0 +1,22 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
import streamlit as st
from langchain_openai import ChatOpenAI


class OpenAILLM:
    """Factory that builds a ChatOpenAI model from user-supplied UI controls."""

    def __init__(self, user_controls_input):
        # Mapping produced by the Streamlit sidebar.
        # Expected keys: 'API Key' and 'Selected Model'.
        self.user_controls_input = user_controls_input

    def get_llm_model(self):
        """Build and return a ChatOpenAI instance, or None on bad/missing config.

        Reports problems through st.error instead of raising: a missing
        mapping key or an empty API key yields None.
        """
        try:
            openai_api_key = self.user_controls_input['API Key']
            openai_model = self.user_controls_input['Selected Model']

            if openai_api_key == "":
                st.error('Please enter your OpenAI API key to proceed.')
                # Bug fix: previously fell through and constructed ChatOpenAI
                # with an empty key; bail out so the caller's None check works.
                return None

            return ChatOpenAI(model=openai_model, openai_api_key=openai_api_key)

        except KeyError as e:
            st.error(f"Missing configuration: {e}")
            return None
|
src/main.py
CHANGED
|
@@ -2,6 +2,8 @@ import streamlit as st
|
|
| 2 |
import json
|
| 3 |
|
| 4 |
from src.ui.load import LoadStreamlitUI
|
|
|
|
|
|
|
| 5 |
|
| 6 |
def load_app():
|
| 7 |
"""
|
|
@@ -15,9 +17,26 @@ def load_app():
|
|
| 15 |
st.error("Failed to load the UI. Please check your configuration.")
|
| 16 |
return
|
| 17 |
|
| 18 |
-
if st.session_state.
|
| 19 |
user_message = st.session_state.timeframe
|
| 20 |
|
| 21 |
else:
|
| 22 |
user_message = st.text_input("Enter your message:")
|
| 23 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 2 |
import json
|
| 3 |
|
| 4 |
from src.ui.load import LoadStreamlitUI
|
| 5 |
+
from src.llms.groq import GroqLLM
|
| 6 |
+
|
| 7 |
|
| 8 |
def load_app():
|
| 9 |
"""
|
|
|
|
| 17 |
st.error("Failed to load the UI. Please check your configuration.")
|
| 18 |
return
|
| 19 |
|
| 20 |
+
if st.session_state.IsFetchButtonClicked:
|
| 21 |
user_message = st.session_state.timeframe
|
| 22 |
|
| 23 |
else:
|
| 24 |
user_message = st.text_input("Enter your message:")
|
| 25 |
+
|
| 26 |
+
if user_message:
|
| 27 |
+
try:
|
| 28 |
+
llm = GroqLLM(user_controls_input=user_input)
|
| 29 |
+
groq_model = llm.get_llm_model()
|
| 30 |
+
|
| 31 |
+
if not groq_model:
|
| 32 |
+
st.error("Failed to initialize the Groq model. Please check your API key and model selection.")
|
| 33 |
+
return
|
| 34 |
+
|
| 35 |
+
use_case = user_input.get('use_case')
|
| 36 |
+
|
| 37 |
+
if use_case:
|
| 38 |
+
st.error("Error: No usecase selected.")
|
| 39 |
+
return
|
| 40 |
+
|
| 41 |
+
except Exception as e:
|
| 42 |
+
raise ValueError(f"An error occurred while processing the input: {e}")
|
src/ui/__pycache__/config.cpython-312.pyc
CHANGED
|
Binary files a/src/ui/__pycache__/config.cpython-312.pyc and b/src/ui/__pycache__/config.cpython-312.pyc differ
|
|
|
src/ui/__pycache__/load.cpython-312.pyc
CHANGED
|
Binary files a/src/ui/__pycache__/load.cpython-312.pyc and b/src/ui/__pycache__/load.cpython-312.pyc differ
|
|
|
src/ui/config.py
CHANGED
|
@@ -10,34 +10,34 @@ class LoadConfig:
|
|
| 10 |
Get the list of LLM options from the configuration file.
|
| 11 |
"""
|
| 12 |
|
| 13 |
-
return self.config.get
|
| 14 |
|
| 15 |
def get_use_case(self):
|
| 16 |
"""
|
| 17 |
Get the use case from config file.
|
| 18 |
"""
|
| 19 |
|
| 20 |
-
return self.config.get
|
| 21 |
|
| 22 |
def get_title(self):
|
| 23 |
"""
|
| 24 |
Get the title from config file.
|
| 25 |
"""
|
| 26 |
|
| 27 |
-
return self.config.get
|
| 28 |
|
| 29 |
def get_groq_models(self):
|
| 30 |
"""
|
| 31 |
Get groq models from the config file.
|
| 32 |
"""
|
| 33 |
|
| 34 |
-
return self.config.get
|
| 35 |
|
| 36 |
def get_openai_models(self):
|
| 37 |
"""
|
| 38 |
Get OpenAI models from config file.
|
| 39 |
"""
|
| 40 |
|
| 41 |
-
return self.config.get
|
| 42 |
|
| 43 |
|
|
|
|
| 10 |
Get the list of LLM options from the configuration file.
|
| 11 |
"""
|
| 12 |
|
| 13 |
+
return self.config.get('DEFAULT', 'LLM_options').split(', ')
|
| 14 |
|
| 15 |
def get_use_case(self):
    """
    Get the use case from config file.

    Returns the 'USE_CASE' entry of the [DEFAULT] section as a list,
    tolerating optional whitespace around the comma separators.
    """
    raw = self.config.get('DEFAULT', 'USE_CASE')
    # Split on ',' and strip each item so both "A,B" and "A, B" parse
    # correctly (the previous split(', ') broke when the space was omitted).
    return [item.strip() for item in raw.split(',')]
|
| 21 |
|
| 22 |
def get_title(self):
    """
    Get the title from config file.

    Reads the 'Title' entry of the [DEFAULT] section and returns it as-is.
    """
    title = self.config.get('DEFAULT', 'Title')
    return title
|
| 28 |
|
| 29 |
def get_groq_models(self):
    """
    Get groq models from the config file.

    Returns the 'GROQ_MODEL' entry of the [DEFAULT] section as a list,
    tolerating optional whitespace around the comma separators.
    """
    raw = self.config.get('DEFAULT', 'GROQ_MODEL')
    # Split on ',' and strip each item so both "a,b" and "a, b" parse
    # correctly (the previous split(', ') broke when the space was omitted).
    return [item.strip() for item in raw.split(',')]
|
| 35 |
|
| 36 |
def get_openai_models(self):
    """
    Get OpenAI models from config file.

    Returns the 'OPENAI_MODEL' entry of the [DEFAULT] section as a list,
    tolerating optional whitespace around the comma separators.
    """
    raw = self.config.get('DEFAULT', 'OPENAI_MODEL')
    # Split on ',' and strip each item so both "a,b" and "a, b" parse
    # correctly (the previous split(', ') broke when the space was omitted).
    return [item.strip() for item in raw.split(',')]
|
| 42 |
|
| 43 |
|
src/ui/load.py
CHANGED
|
@@ -26,7 +26,7 @@ class LoadStreamlitUI:
|
|
| 26 |
st.set_page_config(page_title=" π€" + self.config.get_title(), page_icon=":robot_face:", layout="wide")
|
| 27 |
st.header(" π€" + self.config.get_title())
|
| 28 |
st.session_state.timeframe = ''
|
| 29 |
-
st.session_state.
|
| 30 |
st.session_state.IsSDLC = False
|
| 31 |
|
| 32 |
with st.sidebar:
|
|
@@ -44,7 +44,7 @@ class LoadStreamlitUI:
|
|
| 44 |
self.user_controls['API Key'] = st.text_input('Enter Groq API Key', type='password')
|
| 45 |
|
| 46 |
if not self.user_controls['API Key']:
|
| 47 |
-
st.warning('
|
| 48 |
|
| 49 |
|
| 50 |
if self.user_controls['Selected LLM'] == 'OpenAI':
|
|
@@ -55,7 +55,7 @@ class LoadStreamlitUI:
|
|
| 55 |
self.user_controls['API Key'] = st.text_input('Enter OpenAI API Key', type='password')
|
| 56 |
|
| 57 |
if not self.user_controls['API Key']:
|
| 58 |
-
st.warning('
|
| 59 |
|
| 60 |
use_case = self.config.get_use_case()
|
| 61 |
|
|
|
|
| 26 |
st.set_page_config(page_title=" π€" + self.config.get_title(), page_icon=":robot_face:", layout="wide")
|
| 27 |
st.header(" π€" + self.config.get_title())
|
| 28 |
st.session_state.timeframe = ''
|
| 29 |
+
st.session_state.IsFetchButtonClicked = False
|
| 30 |
st.session_state.IsSDLC = False
|
| 31 |
|
| 32 |
with st.sidebar:
|
|
|
|
| 44 |
self.user_controls['API Key'] = st.text_input('Enter Groq API Key', type='password')
|
| 45 |
|
| 46 |
if not self.user_controls['API Key']:
|
| 47 |
+
st.warning('Please enter a valid Groq API key to proceed . If you don\'t have an API key, please visit https://console.groq.com to create one.')
|
| 48 |
|
| 49 |
|
| 50 |
if self.user_controls['Selected LLM'] == 'OpenAI':
|
|
|
|
| 55 |
self.user_controls['API Key'] = st.text_input('Enter OpenAI API Key', type='password')
|
| 56 |
|
| 57 |
if not self.user_controls['API Key']:
|
| 58 |
+
st.warning('Please enter a valid OpenAI API key to proceed . If you don\'t have an API key, please visit https://platform.openai.com/account/api-keys to create one.')
|
| 59 |
|
| 60 |
use_case = self.config.get_use_case()
|
| 61 |
|