# Scraped HuggingFace Spaces status header removed (was: "Spaces: Sleeping Sleeping").
import os
from getpass import getpass

import streamlit as st
from langchain import HuggingFaceHub, LLMChain, PromptTemplate

# SECURITY NOTE(review): a hardcoded placeholder token lived here. The original
# assignment unconditionally overwrote any real HUGGINGFACEHUB_API_TOKEN already
# present in the environment; `setdefault` keeps a real token if one is set.
# Replace "Token" with an actual key, or export the env var before launching.
os.environ.setdefault("HUGGINGFACEHUB_API_TOKEN", "Token")

# Hub repo used for zero-shot question answering.
model_id = "mistralai/Mistral-7B-Instruct-v0.2"

# Shared LLM client. Low temperature keeps the step-by-step answers focused;
# max_new_tokens is generous so long worked solutions are not truncated.
conv_model = HuggingFaceHub(
    huggingfacehub_api_token=os.environ["HUGGINGFACEHUB_API_TOKEN"],
    repo_id=model_id,
    model_kwargs={"temperature": 0.2, "max_new_tokens": 5000},
)
def answer_question(question):
    """Run the shared HuggingFace model on a logic/numeric question.

    Wraps *question* in a step-by-step solver prompt, wires it into an
    LLMChain around the module-level ``conv_model``, and executes the chain.

    Parameters
    ----------
    question : str
        The question to solve.

    Returns
    -------
    dict
        The chain's full output mapping; the generated text lives under
        the ``"Answer"`` key.
    """
    prompt = PromptTemplate(
        input_variables=["question"],
        template="""You are a logical and a numerical question solver.Could you please answer this question ? {question}
Show all the steps in details to solve the problem.
""",
    )
    chain = LLMChain(llm=conv_model, prompt=prompt, verbose=True, output_key="Answer")
    return chain({"question": question})
| st.title("Zero Shot Question Answering") | |
| input=st.text_input("Input: ",key="input") | |
| submit=st.button("Ask the question") | |
| if submit and input: | |
| response=answer_question(input) | |
| st.write(response['Answer']) | |
| if __name__ == "__main__": | |
| print("nothing given") |