Daksh0505 committed on
Commit
8708c1c
·
verified ·
1 Parent(s): a083328

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +14 -5
app.py CHANGED
@@ -98,9 +98,9 @@ def create_vector_store(transcript):
98
  # -------------------------------------------------
99
  def build_model(model_choice, temperature=0.7):
100
  """Return the correct model and a flag indicating if it’s chat-based."""
101
- if model_choice == "Flan-T5 (Free)":
102
  llm = HuggingFaceEndpoint(
103
- repo_id="google/flan-t5-base",
104
  huggingfacehub_api_token=api_key,
105
  task="text-generation",
106
  max_new_tokens=500,
@@ -108,7 +108,16 @@ def build_model(model_choice, temperature=0.7):
108
  )
109
  return ChatHuggingFace(llm=llm, temperature=temperature), True # (model, is_chat)
110
 
111
- elif model_choice == "DeepSeek":
 
 
 
 
 
 
 
 
 
112
  llm = HuggingFaceEndpoint(
113
  repo_id="deepseek-ai/DeepSeek-V3.2-Exp",
114
  huggingfacehub_api_token=api_key,
@@ -117,7 +126,7 @@ def build_model(model_choice, temperature=0.7):
117
  )
118
  return ChatHuggingFace(llm=llm, temperature=temperature), True
119
 
120
- elif model_choice == "OpenAI":
121
  llm = HuggingFaceEndpoint(
122
  repo_id="openai/gpt-oss-20b",
123
  huggingfacehub_api_token=api_key,
@@ -148,7 +157,7 @@ st.title("🎬 YouTube Transcript Chatbot (RAG)")
148
 
149
  video_id = st.text_input("YouTube Video ID", value="lv1_-RER4_I")
150
  query = st.text_area("Your Query", value="What is RAG?")
151
- model_choice = st.radio("Model to Use", ["Flan-T5 (Free)", "DeepSeek", "OpenAI"])
152
  temperature = st.slider("Temperature", 0, 100, value=50) / 100.0
153
 
154
  # Get available languages for this video
 
98
  # -------------------------------------------------
99
  def build_model(model_choice, temperature=0.7):
100
  """Return the correct model and a flag indicating if it’s chat-based."""
101
+ if model_choice == "TinyLlama-1.1B":
102
  llm = HuggingFaceEndpoint(
103
+ repo_id="TinyLlama/TinyLlama-1.1B-Chat-v1.0",
104
  huggingfacehub_api_token=api_key,
105
  task="text-generation",
106
  max_new_tokens=500,
 
108
  )
109
  return ChatHuggingFace(llm=llm, temperature=temperature), True # (model, is_chat)
110
 
111
+ elif model_choice == "Gemma-2-2B":
112
+ llm = HuggingFaceEndpoint(
113
+ repo_id="google/gemma-2-2b-it",
114
+ huggingfacehub_api_token=api_key,
115
+ task="text-generation",
116
+ max_new_tokens=500
117
+ )
118
+ return ChatHuggingFace(llm=llm, temperature=temperature), True
119
+
120
+ elif model_choice == "DeepSeek-685B":
121
  llm = HuggingFaceEndpoint(
122
  repo_id="deepseek-ai/DeepSeek-V3.2-Exp",
123
  huggingfacehub_api_token=api_key,
 
126
  )
127
  return ChatHuggingFace(llm=llm, temperature=temperature), True
128
 
129
+ elif model_choice == "OpenAI-20B":
130
  llm = HuggingFaceEndpoint(
131
  repo_id="openai/gpt-oss-20b",
132
  huggingfacehub_api_token=api_key,
 
157
 
158
  video_id = st.text_input("YouTube Video ID", value="lv1_-RER4_I")
159
  query = st.text_area("Your Query", value="What is RAG?")
160
+ model_choice = st.radio("Model to Use", ["TinyLlama-1.1B", "Gemma-2-2B", "DeepSeek-685B", "OpenAI-20B"])
161
  temperature = st.slider("Temperature", 0, 100, value=50) / 100.0
162
 
163
  # Get available languages for this video