| from embeddingllama import GeminiLlamaLoader | |
| import markdown | |
| import os | |
# Module-level LLM loader shared by generate_questions / generate_answer below.
# NOTE(review): constructed at import time — presumably loads the Gemini-backed
# llama index once per process; confirm GeminiLlamaLoader() has no heavy side
# effects on import.
index = GeminiLlamaLoader()
def generate_questions(question, chatReceive):
    """Ask the LLM to recommend three related follow-up questions.

    Builds a recommender prompt from the user's query and the prior chat
    history, sends it to the shared ``index`` LLM, and renders the reply
    (which is instructed to wrap each suggestion in a
    ``<button class='recommended-question'>`` tag) as HTML via markdown.

    Args:
        question: The user's current query.
        chatReceive: Prior chat history, interpolated verbatim into the prompt.

    Returns:
        dict: ``{"Rem": <HTML string rendered from the LLM's reply>}``.
    """
    # Fix: the original kept a local `chat_history` list that was appended to
    # and then discarded on every call (dead state) — removed. The
    # `chat_history=` below is a .format() keyword, not that local.
    prompt = (
        """INSTRUCTION: You will be provided with a question and some context related to it act as a question recommender system for AI chatbot.\
Your job is to predict 3 preceeding questions related to the query that might be relevent to the customer and try to keep the questions in context with the query you will always wrap the recommended questions inside a HTML button tag with the class='recommended-question'\
Use the chat history for better context\
QUESTION: '{query}'
CHAT HISTORY: '{chat_history}'
ANSWER:
"""
    ).format(query=question, chat_history=chatReceive)
    answer = index.ask_llm(prompt).response
    mark_answer = markdown.markdown(answer)
    return {"Rem": mark_answer}
def generate_answer(question, chatReceive):
    """Produce a GDSC SOU community-support reply for *question*.

    Sends a guardrailed support-chatbot prompt (including the supplied chat
    history) to the shared ``index`` LLM and packages the reply.

    Args:
        question: The community member's question.
        chatReceive: Prior chat history, interpolated verbatim into the prompt.

    Returns:
        dict with keys:
            ``answer``: the raw LLM reply text.
            ``chat_history``: one-element list pairing the question with the reply.
            ``markdown``: the reply rendered to HTML via markdown.
    """
    support_prompt = (f"""INSTRUCTION: Act as a community support chatbot for Google Developer Student Clubs Silver Oak University alias GDSC SOU, this is conversation \
to a community member. Use the PASSAGE and CHAT-HISTORY to answer in a helpful manner to the QUESTION. \
Don't forget you are a Community support chatbot don't answer questions that include queries not related to Google Developer \
Student Clubs, Google Developer Students Club Silver Oak University, Silver Oak University and general chat. \
If you don't know any ANSWER, say you don't know \
Always follow general guardrails before generating any response. \
Always try to keep the conversation in context to GDSC SOU. Keep your replies short \
compassionate and informative.\
Give the answer from the passage and chat history \
You should help user to get his query solved and also try to increase engagement for GDSC SOU by also promoting GDSC SOU.\
QUESTION: '{question}'
CHAT-HISTORY: '{chatReceive}'
ANSWER:
""")
    reply = index.ask_llm(support_prompt).response
    return {
        "answer": reply,
        "chat_history": [{"user": question, "Assistant": reply}],
        "markdown": markdown.markdown(reply),
    }