# agentic-api/examples/anthropic_sdk_example.py
# Author: MiniMax Agent
# Commit: Add Anthropic API compatible wrapper for OpenELM models (9604400)
"""
Example: Using Anthropic SDK with OpenELM API
This example demonstrates how to use the Anthropic SDK (or compatible client)
to call OpenELM models through our Anthropic API compatible wrapper.
Usage:
python examples/anthropic_sdk_example.py
"""
import sys
import os
# Add parent directory to path for imports
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from app import AnthropicClient
def _run_example(client, title, request, show_meta=False):
    """Run a single example request and print its result.

    Args:
        client: AnthropicClient instance used to issue the request.
        title: Section heading printed before the request is made.
        request: Keyword arguments forwarded to ``client.messages().create()``
            (model, messages, max_tokens, temperature, optional system, ...).
        show_meta: When True, also print the response id and model name
            (used by the first example to show the full response envelope).
    """
    print(title)
    print("-" * 40)
    response = client.messages().create(**request)
    if show_meta:
        print(f"Response ID: {response['id']}")
        print(f"Model: {response['model']}")
    # Anthropic-style responses carry a list of content blocks; these
    # examples assume the first block is text — TODO confirm for other models.
    print(f"Content: {response['content'][0]['text']}")
    print(f"Usage: {response['usage']}")
    print()


def main():
    """Example usage of the Anthropic-compatible OpenELM API.

    Runs four demonstration requests against a local (or OPENELM_API_URL
    configured) server: basic generation, multi-turn conversation, system
    prompt usage, and deterministic (temperature=0) generation.
    """
    # Create client pointing to our local API; api_key is unused by the
    # local wrapper but required by the client interface.
    base_url = os.environ.get("OPENELM_API_URL", "http://localhost:8000")
    client = AnthropicClient(base_url=base_url, api_key="dummy-key")

    print("=" * 60)
    print("OpenELM Anthropic API - Usage Example")
    print("=" * 60)
    print(f"API URL: {base_url}")
    print()

    # Each example is the same request/print cycle with different inputs,
    # so they are driven through a single helper.
    _run_example(
        client,
        "Example 1: Basic Message Generation",
        {
            "model": "openelm-450m-instruct",
            "messages": [
                {"role": "user", "content": "Say hello in a friendly way!"}
            ],
            "max_tokens": 100,
            "temperature": 0.7,
        },
        show_meta=True,
    )

    _run_example(
        client,
        "Example 2: Multi-turn Conversation",
        {
            "model": "openelm-450m-instruct",
            "messages": [
                {"role": "user", "content": "What is artificial intelligence?"},
                {"role": "assistant", "content": "Artificial intelligence, or AI, refers to systems that can perform tasks that typically require human intelligence."},
                {"role": "user", "content": "Can you give me some examples?"}
            ],
            "max_tokens": 150,
            "temperature": 0.5,
        },
    )

    _run_example(
        client,
        "Example 3: Using System Prompt",
        {
            "model": "openelm-450m-instruct",
            "messages": [
                {"role": "user", "content": "Explain quantum computing simply."}
            ],
            "system": "You are a helpful science educator who explains complex topics simply.",
            "max_tokens": 200,
            "temperature": 0.8,
        },
    )

    _run_example(
        client,
        "Example 4: Deterministic Generation (temperature=0)",
        {
            "model": "openelm-450m-instruct",
            "messages": [
                {"role": "user", "content": "What is 2 + 2?"}
            ],
            "max_tokens": 50,
            "temperature": 0.0,  # Deterministic output
        },
    )

    print("=" * 60)
    print("All examples completed successfully!")
    print("=" * 60)
if __name__ == "__main__":
main()