# NOTE: scraped from a Hugging Face Space whose status page reported "Runtime error".
| import streamlit as st | |
| from streamlit_chat import message | |
| import os, tempfile, sys | |
| from io import BytesIO | |
| from io import StringIO | |
| import pandas as pd | |
| from langchain.agents import create_pandas_dataframe_agent | |
| from langchain.llms.openai import OpenAI | |
| from langchain.embeddings.openai import OpenAIEmbeddings | |
| from langchain.chains.summarize import load_summarize_chain | |
| from langchain.document_loaders.csv_loader import CSVLoader | |
| from langchain.text_splitter import RecursiveCharacterTextSplitter | |
| from langchain.text_splitter import CharacterTextSplitter | |
| from langchain.chains.mapreduce import MapReduceChain | |
| from langchain.docstore.document import Document | |
| from langchain.vectorstores import FAISS | |
| from langchain.chat_models import ChatOpenAI | |
| from langchain.chains import ConversationalRetrievalChain | |
| from langchain.chains import RetrievalQA | |
| from langchain.memory import ConversationBufferMemory | |
| from langchain.chains.conversational_retrieval.prompts import CONDENSE_QUESTION_PROMPT | |
| from langchain.chains.question_answering import load_qa_chain | |
| from langchain.prompts.prompt import PromptTemplate | |
| from langchain import LLMChain | |
# Must be the first Streamlit command executed in the script: sets the browser
# tab title and switches to the full-width page layout.
st.set_page_config(page_title="CSV Analyzer AI", layout="wide")
def chat(temperature, model_name):
    """Render the "Talk to CSV" page: upload a CSV and chat about its contents.

    Parameters
    ----------
    temperature : float
        Sampling temperature forwarded to the chat model (previously this was
        hard-coded to 0.0, silently ignoring the sidebar slider — fixed).
    model_name : str
        OpenAI chat model identifier, e.g. "gpt-3.5-turbo" (previously
        hard-coded — fixed).

    Side effects: reads/writes st.session_state keys 'history', 'generated',
    'past'; writes the uploaded CSV to a temp file; calls the OpenAI API via
    the module-global ``user_api_key``.
    """
    st.write("# Talk to CSV")

    # Bug fix: the Reset button existed but did nothing. Dropping the three
    # session keys makes the greeting/history reinitialize on the next rerun.
    if st.sidebar.button("Reset Chat"):
        for state_key in ('history', 'generated', 'past'):
            st.session_state.pop(state_key, None)

    uploaded_file = st.sidebar.file_uploader("Upload your CSV here π:", type="csv")
    if uploaded_file:
        # CSVLoader needs a real path, so persist the upload to a temp file.
        with tempfile.NamedTemporaryFile(delete=False) as tmp_file:
            tmp_file.write(uploaded_file.getvalue())
            tmp_file_path = tmp_file.name

        loader = CSVLoader(file_path=tmp_file_path, encoding="utf-8")
        data = loader.load()

        # NOTE(review): embeddings + FAISS index are rebuilt on every Streamlit
        # rerun; consider caching (st.cache_resource) for larger files.
        embeddings = OpenAIEmbeddings()
        vectors = FAISS.from_documents(data, embeddings)

        # Bug fix: honour the caller-supplied model settings instead of the
        # hard-coded temperature=0.0 / 'gpt-3.5-turbo'.
        chain = ConversationalRetrievalChain.from_llm(
            llm=ChatOpenAI(
                temperature=temperature,
                model_name=model_name,
                openai_api_key=user_api_key,  # module-level global, set in the main script
            ),
            retriever=vectors.as_retriever(),
        )

        def conversational_chat(query):
            """Run one retrieval-chat turn and append it to the session history."""
            result = chain({"question": query, "chat_history": st.session_state['history']})
            st.session_state['history'].append((query, result["answer"]))
            return result["answer"]

        if 'history' not in st.session_state:
            st.session_state['history'] = []
        if 'generated' not in st.session_state:
            st.session_state['generated'] = ["Hello ! Ask me anything about " + uploaded_file.name + " π€"]
        if 'past' not in st.session_state:
            st.session_state['past'] = ["Hey ! π"]

        # container for the chat history
        response_container = st.container()
        # container for the user's text input
        container = st.container()

        with container:
            with st.form(key='my_form', clear_on_submit=True):
                user_input = st.text_input("Query:", placeholder="Talk about your csv data here (:", key='input')
                submit_button = st.form_submit_button(label='Send')
            if submit_button and user_input:
                output = conversational_chat(user_input)
                st.session_state['past'].append(user_input)
                st.session_state['generated'].append(output)

        if st.session_state['generated']:
            with response_container:
                for i in range(len(st.session_state['generated'])):
                    message(st.session_state["past"][i], is_user=True, key=str(i) + '_user', avatar_style="big-smile")
                    message(st.session_state["generated"][i], key=str(i), avatar_style="thumbs")
# ---- Main App -------------------------------------------------------------
st.markdown(
    """
    <div style='text-align: center;'>
    <h1>CSV Analyzer AI</h1>
    </div>
    """,
    unsafe_allow_html=True,
)
st.markdown(
    """
    <div style='text-align: center;'>
    <h4>β‘οΈ Analyzing CSV Files</h4>
    </div>
    """,
    unsafe_allow_html=True,
)

# Resolve the OpenAI API key: prefer one already present in the environment
# (e.g. loaded from a .env file), otherwise ask for it in the sidebar.
if os.path.exists(".env") and os.environ.get("OPENAI_API_KEY") is not None:
    user_api_key = os.environ["OPENAI_API_KEY"]
    st.success("API key loaded from .env", icon="π")
else:
    user_api_key = st.sidebar.text_input(
        label="#### Enter OpenAI API key π", placeholder="Paste your openAI API key, sk-", type="password", key="openai_api_key"
    )
    if user_api_key:
        st.sidebar.success("API key loaded", icon="π")
        os.environ["OPENAI_API_KEY"] = user_api_key

# Model / sampling controls.
MODEL_OPTIONS = ["gpt-3.5-turbo", "gpt-4", "gpt-4-32k"]
# NOTE(review): max_tokens is defined but never used anywhere in the app.
max_tokens = {"gpt-4": 7000, "gpt-4-32k": 31000, "gpt-3.5-turbo": 3000}
TEMPERATURE_MIN_VALUE = 0.0
TEMPERATURE_MAX_VALUE = 1.0
TEMPERATURE_DEFAULT_VALUE = 0.9
TEMPERATURE_STEP = 0.01

model_name = st.sidebar.selectbox(label="Model", options=MODEL_OPTIONS)
# NOTE(review): top_p and freq_penalty are displayed but never forwarded to
# the model; ConversationalRetrievalChain in chat() only receives temperature
# and model_name.
top_p = st.sidebar.slider("Top_P", 0.0, 1.0, 1.0, 0.1)
freq_penalty = st.sidebar.slider("Frequency Penalty", 0.0, 2.0, 0.0, 0.1)
temperature = st.sidebar.slider(
    label="Temperature",
    min_value=TEMPERATURE_MIN_VALUE,
    max_value=TEMPERATURE_MAX_VALUE,
    value=TEMPERATURE_DEFAULT_VALUE,
    step=TEMPERATURE_STEP,
)

# Robustness fix: previously chat() ran even with no API key, so uploading a
# CSV crashed inside OpenAIEmbeddings/ChatOpenAI (the Space's "Runtime
# error"). Prompt for the key instead of crashing.
if user_api_key:
    chat(temperature=temperature, model_name=model_name)
else:
    st.warning("Please enter your OpenAI API key in the sidebar to start.")