Spaces:
Sleeping
Sleeping
import os
from getpass import getpass

from langchain import HuggingFaceHub, LLMChain, PromptTemplate

# SECURITY(review): the token was hard-coded as the placeholder "Token",
# which also clobbered any real HUGGINGFACEHUB_API_TOKEN already exported.
# setdefault keeps a token supplied via the environment and only falls back
# to the placeholder. FIXME: replace the placeholder with a real secret
# source (e.g. the commented-out getpass() prompt) before deploying.
# HUGGINGFACE_API_TOKEN = getpass()
os.environ.setdefault("HUGGINGFACEHUB_API_TOKEN", "Token")

# Hosted inference endpoint for the instruct-tuned Mistral 7B model.
model_id = "mistralai/Mistral-7B-Instruct-v0.2"
conv_model = HuggingFaceHub(
    huggingfacehub_api_token=os.environ["HUGGINGFACEHUB_API_TOKEN"],
    repo_id=model_id,
    # temperature 0.5: moderately deterministic sampling;
    # max_new_tokens 5000: generous ceiling for long worked answers.
    model_kwargs={"temperature": 0.5, "max_new_tokens": 5000},
)
def answer_question(question):
    """Run *question* through the Mistral LLM chain and return the chain output.

    Parameters
    ----------
    question : str
        The logical / numerical question to solve.

    Returns
    -------
    dict
        The LLMChain result mapping: the echoed input plus the generated
        text under the "Answer" key (renamed from the default "text"
        via ``output_key`` below).
    """
    prompt = PromptTemplate(
        input_variables=["question"],
        template="""You are a logical and a numerical question solver.Could you please answer this question ?{question}
""",
    )
    # verbose=True echoes the fully-formatted prompt to stdout for debugging.
    conv_chain = LLMChain(
        llm=conv_model,
        prompt=prompt,
        verbose=True,
        output_key="Answer",
    )
    # NOTE(review): the chain is rebuilt on every call; hoist prompt/chain to
    # module level if this function ends up on a hot path.
    response = conv_chain({"question": question})
    return response
if __name__ == "__main__":
    # TODO(review): this entry point is a stub — it never calls
    # answer_question(). Wire up a CLI question (e.g. via sys.argv or
    # input()) and print the result; for now it only reports that no
    # question was supplied.
    print("nothing given")