Spaces:
Runtime error
Runtime error
Faisal
committed on
Commit
·
8b36615
1
Parent(s):
6be87f1
Restore GPU version - remove CPU optimizations and restore GPU-compatible dependencies
Browse files
app.py
CHANGED
|
@@ -99,9 +99,9 @@ def process_pipeline(image, user_question):
|
|
| 99 |
)[0]
|
| 100 |
|
| 101 |
|
| 102 |
-
MAX_INPUT_CHARS = 50
|
| 103 |
-
if len(output_text) > MAX_INPUT_CHARS:
|
| 104 |
-
|
| 105 |
|
| 106 |
|
| 107 |
# Send MedVLM output to DeepSeek R1
|
|
@@ -119,6 +119,7 @@ Original Answer:
|
|
| 119 |
}
|
| 120 |
data = {
|
| 121 |
"model": deepseek_model,
|
|
|
|
| 122 |
"messages": [
|
| 123 |
{"role": "system", "content": "You are a highly skilled medical writer."},
|
| 124 |
{"role": "user", "content": prompt}
|
|
|
|
| 99 |
)[0]
|
| 100 |
|
| 101 |
|
| 102 |
+
# MAX_INPUT_CHARS = 50
|
| 103 |
+
# if len(output_text) > MAX_INPUT_CHARS:
|
| 104 |
+
# output_text = output_text[:MAX_INPUT_CHARS] + "... [truncated]"
|
| 105 |
|
| 106 |
|
| 107 |
# Send MedVLM output to DeepSeek R1
|
|
|
|
| 119 |
}
|
| 120 |
data = {
|
| 121 |
"model": deepseek_model,
|
| 122 |
+
"max_tokens": 4000,
|
| 123 |
"messages": [
|
| 124 |
{"role": "system", "content": "You are a highly skilled medical writer."},
|
| 125 |
{"role": "user", "content": prompt}
|