Update inference.py
Browse files — inference.py (+2 −2)
inference.py
CHANGED
|
@@ -39,7 +39,7 @@ def process_inference_results(results, process_image=False):
|
|
| 39 |
|
| 40 |
return extracted_texts
|
| 41 |
|
| 42 |
-
def inference_and_run(image_path, prompt, conv_mode="ferret_gemma_instruct", model_path="jadechoghari/Ferret-UI-Gemma2b", box=None):
|
| 43 |
"""
|
| 44 |
Run the inference and capture the errors for debugging.
|
| 45 |
"""
|
|
@@ -87,7 +87,7 @@ def inference_and_run(image_path, prompt, conv_mode="ferret_gemma_instruct", mod
|
|
| 87 |
with open(output_file_path, "r") as output_file:
|
| 88 |
results = [json.loads(line) for line in output_file]
|
| 89 |
|
| 90 |
-
return process_inference_results(results)
|
| 91 |
else:
|
| 92 |
print("No output JSONL files found.")
|
| 93 |
return None, None
|
|
|
|
| 39 |
|
| 40 |
return extracted_texts
|
| 41 |
|
| 42 |
+
def inference_and_run(image_path, prompt, conv_mode="ferret_gemma_instruct", model_path="jadechoghari/Ferret-UI-Gemma2b", box=None, process_image=False):
|
| 43 |
"""
|
| 44 |
Run the inference and capture the errors for debugging.
|
| 45 |
"""
|
|
|
|
| 87 |
with open(output_file_path, "r") as output_file:
|
| 88 |
results = [json.loads(line) for line in output_file]
|
| 89 |
|
| 90 |
+
return process_inference_results(results, process_image)
|
| 91 |
else:
|
| 92 |
print("No output JSONL files found.")
|
| 93 |
return None, None
|