webshop-hsl-seed123 / hsl_code_snapshot /llm_proxy_examples.py
heendung's picture
Upload folder using huggingface_hub
d1c897a verified
raw
history blame contribute delete
745 Bytes
import base64  # NOTE(review): unused in this text-only example — presumably needed for an image-input variant; confirm before removing
from openai import OpenAI

# --- Connection settings: fill these in before running. ---
# Endpoint will be different depending on whether you will access from corp
# network (i.e. on your laptop in office or on VPN) or from pluto instances.
endpoint = "<Insert your endpoint>"
# e.g. sk-1234567890; to get one, use Slack command /get-llm-cred
key = "<Insert your key>"
# e.g llama-3-1-8b
model = "<Insert your model id>"

# Initialize the client.
# The OpenAI SDK sends the key as "Authorization: Bearer <api_key>" on its own,
# so prepending "Bearer " here would yield "Bearer Bearer <key>" and break auth —
# pass the bare key.
client = OpenAI(
    api_key=key,
    base_url=endpoint,
)

# Text input: a minimal chat completion round-trip.
chat_response = client.chat.completions.create(
    model=model,
    messages=[
        {"role": "system", "content": "You are a helpful assistant."},
        {"role": "user", "content": "Tell me a joke."},
    ],
)
print("Chat response:", chat_response)