codellama / app.py
typesdigital's picture
Update app.py
606b3d2
raw
history blame
766 Bytes
"""Minimal Replicate demo: ask the Llama 2 13B chat model one question.

Flat script: sends a single hard-coded prompt to the hosted
a16z-infra/llama13b-v2-chat model, collects the streamed output chunks,
and prints the assembled response.
"""
import os

# SECURITY: the original hard-coded a live Replicate API token here.
# setdefault lets a properly configured environment variable take priority
# while keeping the script runnable as-is; the leaked key should be ROTATED
# and the fallback literal removed once the env var is set in deployment.
os.environ.setdefault('REPLICATE_API_TOKEN', "r8_afc5kESy4ucPojF3Tw1GE25ER4Ovudy1iPVw6")

import replicate

# Prompts: system-style preamble plus the one user question.
pre_prompt = "You are a helpful assistant. You do not respond as 'User' or pretend to be 'User'. You only respond once as 'Assistant'."
prompt_input = "What is streamlit"

# Generate LLM response. replicate.run streams back an iterator of string
# chunks for this model.
output = replicate.run(
    'a16z-infra/llama13b-v2-chat:df7690f1994d94e96ad9d568eac121aecf50684a0b0963b25a41cc40061269e5',  # LLM model
    input={
        "prompt": f"{pre_prompt} {prompt_input} Assistant: ",  # combined prompt
        "temperature": 0.1,
        "top_p": 0.9,
        "max_length": 124,
        "repetition_penalty": 1,
    },  # model parameters
)
# BUG FIX: the original had a bare `return output` at module level here,
# which is a SyntaxError ('return' outside function) and made the whole
# script unrunnable. Removed.

# Assemble the streamed chunks into the full reply and print it.
full_response = ''.join(output)
print(full_response)