saa231 committed on
Commit
44337e5
·
verified ·
1 Parent(s): e30c0c6

Update project_model.py

Browse files
Files changed (1) hide show
  1. project_model.py +7 -11
project_model.py CHANGED
@@ -1,10 +1,3 @@
1
- '-*- coding: utf-8 -*-'
2
- """project_model.ipynb
3
- Automatically generated by Colab.
4
- Original file is located at
5
- https://colab.research.google.com/drive/1oopkA5yIlfizFuhXOPmTK7MUNh3Qasa3
6
- """
7
-
8
  # project_module.py
9
 
10
  # Import libraries for ML, CV, NLP, audio, and TTS
@@ -197,7 +190,7 @@ def process_inputs(
197
  if audio_path:
198
  # Process audio to text
199
  audio_text = whisper_pipe(audio_path)["text"]
200
- question += ' You are a helpful visual assistant designed for visually impaired users that assists users by answering the following question. If unsure, say "I am not certain."' + audio_text.strip()
201
 
202
  # Add user's new question to the history
203
  session.add_question(question)
@@ -215,9 +208,12 @@ def process_inputs(
215
 
216
  # Process the input through Gemma
217
  gemma_output = gemma_pipe(
218
- images=session.current_image,
219
- text=question + " This is the shared visual context: " + session.visual_context
220
- )
 
 
 
221
 
222
  # Handle the output from Gemma model safely
223
  if isinstance(gemma_output, list) and len(gemma_output) > 0:
 
 
 
 
 
 
 
 
1
  # project_module.py
2
 
3
  # Import libraries for ML, CV, NLP, audio, and TTS
 
190
  if audio_path:
191
  # Process audio to text
192
  audio_text = whisper_pipe(audio_path)["text"]
193
+ question += ' ' + audio_text.strip()
194
 
195
  # Add user's new question to the history
196
  session.add_question(question)
 
208
 
209
  # Process the input through Gemma
210
  gemma_output = gemma_pipe(
211
+ images=session.current_image,
212
+ text=[
213
+ {"role": "system", "content": 'You are a helpful visual assistant for visually impaired users. If unsure, say "I am not certain."'},
214
+ {"role": "user", "content": "Question: " + question + " This is the shared visual context: " + session.visual_context}
215
+ ]
216
+ )
217
 
218
  # Handle the output from Gemma model safely
219
  if isinstance(gemma_output, list) and len(gemma_output) > 0: