from llama_index.core.llms import ChatMessage
from llama_index.llms.perplexity import Perplexity


def get_response_from_perplexity(
    question: str,
    perplexity_key: str,
    *,
    model: str = "sonar",
    temperature: float = 0.2,
    system_prompt: str = "answer in 2-3 sentences",
):
    """Send *question* to a Perplexity chat model and return its response.

    Args:
        question: The user's question to forward to the model.
        perplexity_key: Perplexity API key used to authenticate the request.
        model: Perplexity model name (defaults to "sonar", as before).
        temperature: Sampling temperature (defaults to 0.2, as before).
        system_prompt: System instruction prepended to the conversation
            (defaults to the original "answer in 2-3 sentences").

    Returns:
        The ChatResponse object produced by ``Perplexity.chat``; callers can
        read the text via ``str(response)`` or ``response.message.content``.

    Note:
        Performs a network call; raises whatever the llama-index Perplexity
        client raises on auth/transport errors.
    """
    # NOTE(review): the client is rebuilt on every call; acceptable for
    # one-off queries, but hoist it if this is called in a loop.
    perplexity_llm = Perplexity(
        api_key=perplexity_key,
        model=model,
        temperature=temperature,
    )
    # Build ChatMessage objects directly instead of round-tripping through
    # intermediate dicts.
    messages = [
        ChatMessage(role="system", content=system_prompt),
        ChatMessage(role="user", content=question),
    ]
    return perplexity_llm.chat(messages)