Pulkit-exe committed on
Commit
a1f2cae
·
1 Parent(s): a6d5f76

changes to inference model

Browse files
Files changed (1) hide show
  1. app.py +11 -5
app.py CHANGED
@@ -6,7 +6,9 @@ import gradio as gr
6
  from huggingface_hub import InferenceClient
7
  import os
8
  import shutil
 
9
 
 
10
 
11
  shutil.rmtree("/root/.cache", ignore_errors=True)
12
  shutil.rmtree("/tmp", ignore_errors=True)
@@ -16,7 +18,7 @@ CHROMA_PATH = "chroma"
16
  KEY = os.getenv("token")
17
 
18
  # Hugging Face API setup
19
- repo_id = "mistralai/Mistral-7B-Instruct-v0.3"
20
 
21
  PROMPT_TEMPLATE = """
22
  Answer the question based on the context provided. If no relevant information is found, state so.
@@ -28,6 +30,10 @@ Question:
28
  {question}
29
 
30
  Answer:
 
 
 
 
31
  """
32
 
33
  # Initialize the local embedding model
@@ -61,8 +67,8 @@ class LLM:
61
  "content": prompt
62
  }
63
  ],
64
- max_tokens=250,
65
- temperature=0.2
66
  )
67
  return str(completion.choices[0].message.content)
68
 
@@ -116,7 +122,7 @@ def predict(message, history):
116
  # Define the introductory content
117
  intro_content = """
118
  # Course Recommendation Bot
119
- This bot helps you find **free courses** available on [Analytics Vidhya](https://www.analyticsvidhya.com/).
120
  You can ask any questions related to these courses.
121
 
122
  For example:
@@ -129,4 +135,4 @@ with gr.Blocks() as demo:
129
  gr.Markdown(intro_content) # Display introductory content
130
  chatbot = gr.ChatInterface(predict, type="messages") # Chat interface
131
 
132
- demo.launch(share=True)
 
6
  from huggingface_hub import InferenceClient
7
  import os
8
  import shutil
9
+ from dotenv import load_dotenv
10
 
11
+ load_dotenv()
12
 
13
  shutil.rmtree("/root/.cache", ignore_errors=True)
14
  shutil.rmtree("/tmp", ignore_errors=True)
 
18
  KEY = os.getenv("token")
19
 
20
  # Hugging Face API setup
21
+ repo_id = "Qwen/Qwen2.5-7B-Instruct"
22
 
23
  PROMPT_TEMPLATE = """
24
  Answer the question based on the context provided. If no relevant information is found, state so.
 
30
  {question}
31
 
32
  Answer:
33
+ // Do not include the statement "Based on the provided context" in your answer. Start directly with the answer.
34
+ // If the user mentions that he does not want to know about these courses, ask him what topic he wants to learn about in the answer.
35
+
36
+ If you encounter a text message which is not related to the context, the bot must respond with "I don't have relevant information to answer that. Kindly ask queries related to free data science and machine learning courses on Analytics Vidhya."
37
  """
38
 
39
  # Initialize the local embedding model
 
67
  "content": prompt
68
  }
69
  ],
70
+ max_tokens=500,
71
+ temperature=0.8
72
  )
73
  return str(completion.choices[0].message.content)
74
 
 
122
  # Define the introductory content
123
  intro_content = """
124
  # Course Recommendation Bot
125
+ This bot helps you find **free courses related to python, data science and machine learning** available on [Analytics Vidhya](https://www.analyticsvidhya.com/).
126
  You can ask any questions related to these courses.
127
 
128
  For example:
 
135
  gr.Markdown(intro_content) # Display introductory content
136
  chatbot = gr.ChatInterface(predict, type="messages") # Chat interface
137
 
138
+ demo.launch(share=False)