# NOTE(review): the lines below were page-scrape residue from the Hugging Face
# Spaces web UI (status, file size, commit hashes, and a line-number gutter),
# not Python source. Preserved as a comment so the file parses:
#   Spaces: Sleeping / Sleeping
#   File size: 1,217 Bytes
#   9366a75 b7c93ca 0c28510 ... (commit hashes)
import streamlit as st
import os
from langchain import HuggingFaceHub,PromptTemplate,LLMChain
from getpass import getpass
# Hugging Face Hub API token.
# SECURITY NOTE(review): the token is hardcoded as a placeholder string here —
# it should be supplied via the environment / secrets manager, never committed.
os.environ["HUGGINGFACEHUB_API_TOKEN"] = "Token"
# Remote instruction-tuned model served by the Hugging Face Inference API.
model_id="mistralai/Mistral-7B-Instruct-v0.2"
# Shared LLM client used by answer_question(); low temperature keeps the
# step-by-step answers mostly deterministic.
# NOTE(review): max_new_tokens=5000 likely exceeds what the hosted endpoint
# allows for this model — confirm against the Inference API limits.
conv_model=HuggingFaceHub(huggingfacehub_api_token=os.environ['HUGGINGFACEHUB_API_TOKEN'],
repo_id=model_id,
model_kwargs={"temperature":0.2,"max_new_tokens":5000})
def answer_question(question):
    """Answer a logical/numerical question with the Mistral-7B-Instruct model.

    Parameters
    ----------
    question : str
        The user's question; interpolated into the zero-shot prompt template.

    Returns
    -------
    dict
        The LLMChain result mapping. The generated text is stored under the
        "Answer" key (configured via ``output_key``).
    """
    # Zero-shot prompt: {question} is the only template variable.
    prompt = PromptTemplate(
        input_variables=['question'],
        template="""You are a logical and a numerical question solver.Could you please answer this question ? {question}
Show all the steps in details to solve the problem.
""",
    )
    # verbose=True echoes the formatted prompt to the console for debugging;
    # output_key renames the generated-text field from the default "text".
    conv_chain = LLMChain(llm=conv_model, prompt=prompt, verbose=True, output_key="Answer")
    response = conv_chain({'question': question})
    return response
# --- Streamlit UI ---
st.title("Zero Shot Question Answering")
# Renamed from `input` to avoid shadowing the built-in input(); the widget
# key stays "input" so any existing session state remains compatible.
user_question = st.text_input("Input: ", key="input")
submit = st.button("Ask the question")
# Query the model only when the button was pressed AND the box is non-empty.
if submit and user_question:
    response = answer_question(user_question)
    st.write(response['Answer'])
if __name__ == "__main__":
    # Fallback message when executed directly; the Streamlit UI above is the
    # real entry point. (A stray trailing "|" scrape artifact was removed —
    # it made this line a syntax error.)
    print("nothing given")