Spaces:
Runtime error
Runtime error
Commit
·
f2ff438
1
Parent(s):
606b3d2
Update app.py
Browse files
app.py
CHANGED
|
@@ -3,20 +3,23 @@ os.environ['REPLICATE_API_TOKEN'] = "&lt;REDACTED — leaked secret; revoke this token and load it from Space secrets/env instead of hard-coding it&gt;"
|
|
| 3 |
|
| 4 |
import replicate
|
| 5 |
|
| 6 |
-
#Prompts
|
| 7 |
pre_prompt = "You are a helpful assistant. You do not respond as 'User' or pretend to be 'User'. You only respond once as 'Assistant'."
|
| 8 |
-
prompt_input = "What is
|
| 9 |
|
| 10 |
-
#Generate LLM response
|
| 11 |
-
output = replicate.run('
|
| 12 |
-
input={
|
| 13 |
-
|
| 14 |
-
|
| 15 |
-
|
|
|
|
|
|
|
|
|
|
| 16 |
|
| 17 |
full_response = ''
|
| 18 |
|
| 19 |
for item in output:
|
| 20 |
-
|
| 21 |
|
| 22 |
print(full_response)
|
|
|
|
"""Minimal Replicate demo: send a prompt to a hosted LLM and print the reply.

NOTE(review): the surrounding commit context shows a Replicate API token
hard-coded via ``os.environ`` near the top of the real app.py — presumably
leaked; it must be revoked and supplied through environment/Space secrets
instead. Confirm against the full file.
"""
import replicate

# Prompts: a system-style preamble plus the user question.
pre_prompt = "You are a helpful assistant. You do not respond as 'User' or pretend to be 'User'. You only respond once as 'Assistant'."
prompt_input = "What is Hugging Face"

# Generate LLM response. `replicate.run` returns an iterable of text
# chunks for streaming models, so the result is consumed below.
output = replicate.run(
    'huggingface/llama-base-125M',  # LLM model
    input={
        "prompt": f"{pre_prompt} {prompt_input} Assistant: ",  # Prompts
        "temperature": 0.1,          # low temperature -> near-deterministic output
        "top_p": 0.9,                # nucleus sampling cutoff
        "max_length": 124,           # cap on generated tokens
        "repetition_penalty": 1,     # 1 = no repetition penalty
    },
)  # Model parameters

# Join the streamed chunks in one pass instead of quadratic `+=` in a loop.
full_response = ''.join(output)

print(full_response)