# AI Scientist — Streamlit app (originally hosted as a Hugging Face Space)
"""Streamlit app: answers a student's question using a Groq-hosted LLM,
conditioned on the academic year and stream they enter."""

import os

import streamlit as st
from langchain_groq import ChatGroq
from langchain.prompts import PromptTemplate

# --- UI: title and inputs ---
st.title('AI Scientist')
st.subheader('Here you will find solutions for your problem')

academic_year = st.text_input('Enter Academic year')
stream = st.text_input('Enter Streams of education')

# Context string interpolated into the prompt below.
details = f"""
Academic year: {academic_year}
Stream: {stream}
"""

question = st.text_input('Enter your question')
submitted = st.button('SUBMIT')

# Prompt template: {k} is the user's question, {details} the student context.
prompt = PromptTemplate(
    input_variables=["details", "k"],
    template="Tell me about {k} based on the following details: {details} in 20 words."
)

# SECURITY: never hard-code API keys in source (the original embedded a live
# key). Read it from the environment — on Streamlit/HF Spaces, set GROQ_API_KEY
# in the secrets configuration.
model = ChatGroq(
    temperature=0.6,
    groq_api_key=os.environ.get("GROQ_API_KEY", ""),
)

# Only call the model once the button is pressed AND a question was entered.
if submitted and question:
    formatted_prompt = prompt.format(k=question, details=details)
    # invoke() is the current LangChain Runnable API; predict() is deprecated.
    # invoke() returns an AIMessage, so display its .content text.
    response = model.invoke(formatted_prompt)
    st.write(response.content)