"""Streamlit demo app: a Llama 3.2 model fine-tuned to answer SQL questions.

Renders a small UI (header, optional editable schema context, question box),
sends the question plus context to a GGUF model served by llama-cpp-python,
and displays the generated SQL answer with timing information.
"""

import time

import streamlit as st
import llama_cpp  # noqa: F401  (kept from original; version/debug use)
from llama_cpp import Llama

# Must be the first Streamlit command executed in the script.
st.set_page_config(layout="wide")

import streamlit.components.v1 as components

# --- Page header -----------------------------------------------------------
col1, mid, col2 = st.columns([4, 1, 40])
with col1:
    st.image("meta-llama-3.webp", width=150)
with col2:
    st.markdown('''# Llama :blue[3.2] ''')
    st.markdown('''##### :gray[finetuned for SQL questions]''')


@st.cache_resource
def _load_model() -> Llama:
    """Download (once) and load the fine-tuned GGUF model.

    Cached with ``st.cache_resource`` so the multi-hundred-MB model is loaded
    a single time per server process instead of on every Streamlit rerun,
    which the original top-level call caused.
    """
    return Llama.from_pretrained(
        repo_id="alessandroptsn/llama3_2sql_model_long",
        filename="*llama3_2sql_model_long.Q4_K_M.gguf",
        verbose=False,
        n_ctx=100000,
    )


llm = _load_model()

# Example database schema offered to the user as a starting context.
st.session_state.context_text = """
CREATE TABLE student (
    student_id INT PRIMARY KEY,
    student_name TEXT,
    age INT
);
INSERT INTO student (student_id, student_name, age) VALUES
(1, 'Carlos Silva', 20),
(2, 'Ana Oliveira', 22),
(3, 'João Pereira', 19);
CREATE TABLE topic (
    topic_id INT PRIMARY KEY,
    topic_name TEXT,
    hours INT
);
INSERT INTO topic (topic_id, topic_name, hours) VALUES
(1, 'Mathematics', 60),
(2, 'History', 40),
(3, 'Biology', 50);
CREATE TABLE grades (
    grades_id INT PRIMARY KEY,
    student_id INT,
    topic_id INT,
    grade REAL,
    FOREIGN KEY (student_id) REFERENCES student(student_id),
    FOREIGN KEY (topic_id) REFERENCES topic(topic_id)
);
INSERT INTO grades (grades_id,student_id, topic_id, grade) VALUES
(1,1, 1, 8.5),
(2,1, 2, 7.0),
(3,2, 1, 9.0),
(4,2, 3, 6.5),
(5,3, 2, 7.5),
(6,3, 3, 7.0);
"""


def context_text_clicked():
    """Callback for 'Reset context': restore the text area to the example schema."""
    st.session_state.text = st.session_state.context_text


def text_clicked2():
    """Callback for 'Apply context'.

    Intentionally a self-assignment: running any callback makes Streamlit
    commit the text-area widget state (key='text') before the rerun, so the
    button handler below can read the user's edits.
    """
    st.session_state.text = st.session_state.text


query = st.text_input(label="Insert your question here")

col1, col2 = st.columns([1, 3])

with col1:
    context_check = st.checkbox("Add context")
    if context_check:
        test = st.session_state.context_text
        st.session_state.test2 = test
        # Editable schema; edits live in st.session_state.text via key='text'.
        st.text_area("For any change, click in 'Apply context'", test, key="text", height=700)
        b1, b2 = st.columns(2)
        with b1:
            if st.button("Reset context", on_click=context_text_clicked):
                st.session_state.concatenated_context = st.session_state.context_text
        with b2:
            if st.button("Apply context", on_click=text_clicked2):
                st.session_state.concatenated_context = st.session_state.text

with col2:
    if st.button("Answer"):
        start = time.time()
        # Fallback chain (replaces the original nested bare `except:` blocks):
        # applied/reset context -> untouched example context -> no context.
        final_context = st.session_state.get(
            "concatenated_context",
            st.session_state.get("test2", ""),
        )
        st.session_state.resp = llm.create_chat_completion(
            messages=[
                {
                    "role": "system",
                    "content": "You are an SQL generator that takes a users query and gives them helpful SQL to use. Answer in SQL querys for the user."
                },
                {
                    # NOTE(review): "template" is not a standard chat role
                    # (system/user/assistant); the fine-tune may rely on it —
                    # confirm against the model's chat template before changing.
                    "role": "template",
                    "content": """Below is an instruction that describes a task, paired with an input that provides further context. Write a response that appropriately completes the request.

### Instruction:
Company database: """ + final_context + """

### Input:
SQL Prompt: """ + query + """

### Response:
SQL: {}
Explanation: {}
"""
                }
            ]  # ,temperature=0.85
        )
        end = time.time()
        st.session_state.total_time = end - start

    # Persist the last answer across reruns so it survives widget interaction.
    if 'resp' in st.session_state:
        st.markdown(st.session_state.resp["choices"][0]["message"]['content'])
        st.write(f"Execution time: {st.session_state.total_time:.2f} seconds")
        if context_check:
            st.write("Do you want to validate the query? run it [here](%s)" % 'https://onecompiler.com/postgresql/')

if context_check:
    st.text("Diagram of the example context provided :")
    components.iframe(
        "https://dbdiagram.io/e/6779dbd45406798ef74a4a61/6779dbf35406798ef74a4bbe",
        height=500,
    )

footer = """
"""
st.markdown(footer, unsafe_allow_html=True)