Spaces:
Runtime error
Runtime error
Update app.py
Browse files
app.py
CHANGED
|
@@ -76,7 +76,7 @@ languages = {
|
|
| 76 |
components = {}
|
| 77 |
dotenv.load_dotenv()
|
| 78 |
seamless_client = Client("facebook/seamless_m4t")
|
| 79 |
-
tulu_client = Client("https://tonic1-tulu.hf.space/--replicas/<truncated in page extraction — original replica id not shown>")
|
| 80 |
HuggingFace_Token = os.getenv("HuggingFace_Token")
|
| 81 |
hf_token = os.getenv("HuggingFace_Token")
|
| 82 |
device = "cuda" if torch.cuda.is_available() else "cpu"
|
|
@@ -367,7 +367,7 @@ def process_summary_with_tulu(summary):
|
|
| 367 |
# Define the parameters for the Tulu Gradio client API call
|
| 368 |
your_message = summary
|
| 369 |
assistant_message = "You are a medical instructor. Assess and describe the proper options to your students in minute detail. Propose a course of action for them to base their recommendations on based on your description."
|
| 370 |
-
max_new_tokens = <truncated in page extraction — original value not shown>
|
| 371 |
temperature = 0.4
|
| 372 |
top_p = 0.9
|
| 373 |
repetition_penalty = 0.9
|
|
|
|
| 76 |
components = {}
|
| 77 |
dotenv.load_dotenv()
|
| 78 |
seamless_client = Client("facebook/seamless_m4t")
|
| 79 |
+
tulu_client = Client("https://tonic1-tulu.hf.space/--replicas/l7n44/")
|
| 80 |
HuggingFace_Token = os.getenv("HuggingFace_Token")
|
| 81 |
hf_token = os.getenv("HuggingFace_Token")
|
| 82 |
device = "cuda" if torch.cuda.is_available() else "cpu"
|
|
|
|
| 367 |
# Define the parameters for the Tulu Gradio client API call
|
| 368 |
your_message = summary
|
| 369 |
assistant_message = "You are a medical instructor. Assess and describe the proper options to your students in minute detail. Propose a course of action for them to base their recommendations on based on your description."
|
| 370 |
+
max_new_tokens = 550
|
| 371 |
temperature = 0.4
|
| 372 |
top_p = 0.9
|
| 373 |
repetition_penalty = 0.9
|