Update app.py
Browse files
app.py
CHANGED
|
@@ -58,7 +58,6 @@ def respond(
|
|
| 58 |
fullprompt += f"Human: {user}\nAI: {assistant}\n"
|
| 59 |
|
| 60 |
fullprompt += f"Human: {message}\nAI:"
|
| 61 |
-
|
| 62 |
APIL = "https://api-inference.huggingface.co/models/meta-llama/Meta-Llama-3-70B-Instruct"
|
| 63 |
headers = {"Authorization": f"Bearer {hftoken}"}
|
| 64 |
|
|
@@ -105,13 +104,13 @@ demo = gr.ChatInterface(
|
|
| 105 |
CSV 파일에 있는 내용에 대해서는 적절한 대답을 생성해 주세요.
|
| 106 |
""", label="μμ€ν
ν둬ννΈ"),
|
| 107 |
gr.Slider(minimum=1, maximum=4000, value=1000, step=1, label="Max new tokens"),
|
| 108 |
-
gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="
|
| 109 |
gr.Slider(
|
| 110 |
minimum=0.1,
|
| 111 |
maximum=1.0,
|
| 112 |
value=0.95,
|
| 113 |
step=0.05,
|
| 114 |
-
label="
|
| 115 |
),
|
| 116 |
],
|
| 117 |
examples=[
|
|
|
|
| 58 |
fullprompt += f"Human: {user}\nAI: {assistant}\n"
|
| 59 |
|
| 60 |
fullprompt += f"Human: {message}\nAI:"
|
|
|
|
| 61 |
APIL = "https://api-inference.huggingface.co/models/meta-llama/Meta-Llama-3-70B-Instruct"
|
| 62 |
headers = {"Authorization": f"Bearer {hftoken}"}
|
| 63 |
|
|
|
|
| 104 |
CSV 파일에 있는 내용에 대해서는 적절한 대답을 생성해 주세요.
|
| 105 |
""", label="μμ€ν
ν둬ννΈ"),
|
| 106 |
gr.Slider(minimum=1, maximum=4000, value=1000, step=1, label="Max new tokens"),
|
| 107 |
+
gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="temperature"),
|
| 108 |
gr.Slider(
|
| 109 |
minimum=0.1,
|
| 110 |
maximum=1.0,
|
| 111 |
value=0.95,
|
| 112 |
step=0.05,
|
| 113 |
+
label="top-p (nucleus sampling)",
|
| 114 |
),
|
| 115 |
],
|
| 116 |
examples=[
|