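"""Gradio app for the KadiAPY AI coding assistant.

Serves a retrieval-augmented generation (RAG) chat UI in which user questions
are answered by a Groq-hosted LLM backed by a Chroma vectorstore.
"""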
import os
import json

import gradio as gr
from huggingface_hub import HfApi, login
from dotenv import load_dotenv

from llm import get_groq_llm
from vectorstore import get_chroma_vectorstore
from embeddings import get_SFR_Code_embedding_model
from kadiApy_ragchain import KadiApyRagchain

# Load environment variables from .env file
load_dotenv()
vectorstore_path = "data/vectorstore"
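
# Both keys must be present in the environment (from .env locally, or as
# Space secrets when deployed).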
GROQ_API_KEY = os.environ["GROQ_API_KEY"]
HF_TOKEN = os.environ["HF_Token"]
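
# config.json is expected to define the model settings, for example
# (model name illustrative):
# {
#     "llm_model_name": "<groq model name>",
#     "llm_model_temperature": "0.7"
# }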
with open("config.json", "r") as file:
    config = json.load(file)

login(HF_TOKEN)
hf_api = HfApi()
LLM_MODEL_NAME = config["llm_model_name"]
LLM_MODEL_TEMPERATURE = float(config["llm_model_temperature"])
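
# Wire up the retrieval pipeline once: SFR-Code embeddings feed a Chroma
# vectorstore, and a Groq-hosted LLM generates the answers.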
def initialize():
    vectorstore = get_chroma_vectorstore(get_SFR_Code_embedding_model(), vectorstore_path)
    llm = get_groq_llm(LLM_MODEL_NAME, LLM_MODEL_TEMPERATURE, GROQ_API_KEY)
    return KadiApyRagchain(llm, vectorstore)
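
# Answer the most recent user turn. The chain receives the full chat history
# for context, and the (query, None) placeholder pair is replaced in place
# with (query, response).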
def bot_kadi(chat_history, kadiAPY_ragchain):
    user_query = chat_history[-1][0]
    response = kadiAPY_ragchain.process_query(user_query, chat_history)
    chat_history[-1] = (user_query, response)
    return chat_history

# Gradio UI helpers
def add_text_to_chat_history(chat_history, user_input):
    # Append the question with a None placeholder for the pending answer and
    # clear the input textbox.
    chat_history = chat_history + [(user_input, None)]
    return chat_history, ""

def show_history(chat_history):
    # Push the stored history into the visible Chatbot component.
    return chat_history

def reset_all():
    # Clear the stored history, the Chatbot display, and the textbox.
    return [], "", ""
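
# Assemble the Gradio UI and wire the events: submitting a question appends it
# to the history, refreshes the chat display, and then runs the RAG chain to
# fill in the answer.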
def main():
    kadiAPY_ragchain = initialize()  # Build the RAG chain once at startup

    with gr.Blocks() as demo:
        gr.Markdown("## KadiAPY - AI Coding Assistant")
        gr.Markdown("AI assistant for KadiAPY, based on a RAG architecture and powered by an LLM")

        chat_history = gr.State([])

        with gr.Tab("KadiAPY - AI Assistant"):
            with gr.Row():
                with gr.Column(scale=10):
                    chatbot = gr.Chatbot(
                        [],
                        elem_id="chatbot",
                        label="Kadi Bot",
                        bubble_full_width=False,
                        show_copy_button=True,
                        height=600,
                    )
                    user_txt = gr.Textbox(
                        label="Question",
                        placeholder="Type in your question and press Enter or click Submit",
                    )

                    with gr.Row():
                        with gr.Column(scale=1):
                            submit_btn = gr.Button("Submit", variant="primary")
                        with gr.Column(scale=1):
                            clear_btn = gr.Button("Clear", variant="stop")

                    gr.Examples(
                        examples=[
                            "Write me a Python script which can convert plain JSON to a Kadi4Mat-compatible extra metadata structure",
                            "I need a method to upload a file to a record. The id of the record is 3",
                        ],
                        inputs=user_txt,
                        outputs=chatbot,
                        fn=add_text_to_chat_history,
                        label="Try asking...",
                        cache_examples=False,
                        examples_per_page=3,
                    )

        # The rag chain is a plain Python object rather than a Gradio component,
        # so it is bound via a closure instead of being listed as an event input.
        def answer(history):
            return bot_kadi(history, kadiAPY_ragchain)

        user_txt.submit(add_text_to_chat_history, [chat_history, user_txt], [chat_history, user_txt]) \
            .then(show_history, [chat_history], [chatbot]) \
            .then(answer, [chat_history], [chatbot])
        submit_btn.click(add_text_to_chat_history, [chat_history, user_txt], [chat_history, user_txt]) \
            .then(show_history, [chat_history], [chatbot]) \
            .then(answer, [chat_history], [chatbot])
        clear_btn.click(
            reset_all,
            None,
            [chat_history, chatbot, user_txt],
            queue=False,
        )

    demo.launch()

if __name__ == "__main__":
    main()
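
# On a Hugging Face Space this file is typically served as app.py; locally it
# can be run with `python app.py` once GROQ_API_KEY and HF_Token are set.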