#!/usr/bin/env bash
#
# OpenELM OpenAI & Anthropic API - Curl Examples
#
# This script demonstrates how to call the OpenELM API using both
# OpenAI and Anthropic compatible endpoints with curl commands.
#
# Usage:
#   chmod +x examples/curl_examples.sh
#   ./examples/curl_examples.sh
#
# Environment:
#   OPENELM_API_URL - base URL of the API server
#                     (default: http://localhost:8000)

# Error on unset variables. Deliberately NOT using `set -e`/pipefail:
# this is a demo script and later examples should still run even if
# one curl call against the server fails.
set -u

# Set API base URL (default: localhost:8000)
API_URL="${OPENELM_API_URL:-http://localhost:8000}"
API_URL="${API_URL%/}"  # Remove trailing slash so "$API_URL/path" never doubles the '/'

echo "=============================================="
echo "OpenELM OpenAI & Anthropic API - Curl Examples"
echo "=============================================="
echo "API URL: $API_URL"
echo ""
# Example 1: Health Check
# Simple GET with no body; response is pretty-printed via python3's
# stdlib JSON formatter so no extra tools (jq) are required.
echo "Example 1: Health Check"
echo "------------------------"
curl -s "$API_URL/health" | python3 -m json.tool
echo ""
# ============================================
# OpenAI API Examples
# ============================================
echo "##########################################"
echo "# OpenAI API Examples #"
echo "##########################################"
echo ""

# Example 2: OpenAI - List Available Models
# GET /v1/models mirrors the OpenAI models-listing endpoint.
echo "Example 2: OpenAI - List Available Models"
echo "-------------------------------------------"
curl -s "$API_URL/v1/models" | python3 -m json.tool
echo ""
# Example 3: OpenAI - Basic Chat Completion
# POST /v1/chat/completions with a single user message. The JSON body
# is single-quoted so the shell passes it to curl verbatim.
echo "Example 3: OpenAI - Basic Chat Completion"
echo "--------------------------------------------"
curl -s -X POST "$API_URL/v1/chat/completions" \
  -H "Content-Type: application/json" \
  -d '{
    "model": "openelm-450m-instruct",
    "messages": [
      {
        "role": "user",
        "content": "Say hello in a friendly way!"
      }
    ],
    "max_tokens": 100,
    "temperature": 0.7
  }' | python3 -m json.tool
echo ""
# Example 4: OpenAI - Multi-turn Conversation
# Prior assistant turns are replayed in the messages array so the model
# has conversational context for the final user question.
echo "Example 4: OpenAI - Multi-turn Conversation"
echo "--------------------------------------------"
curl -s -X POST "$API_URL/v1/chat/completions" \
  -H "Content-Type: application/json" \
  -d '{
    "model": "openelm-450m-instruct",
    "messages": [
      {
        "role": "user",
        "content": "What is Python?"
      },
      {
        "role": "assistant",
        "content": "Python is a high-level programming language."
      },
      {
        "role": "user",
        "content": "What is it used for?"
      }
    ],
    "max_tokens": 150,
    "temperature": 0.5
  }' | python3 -m json.tool
echo ""
# Example 5: OpenAI - Using System Message
# A "system" role message steers the assistant's behavior before the
# user turn, following the OpenAI chat format.
echo "Example 5: OpenAI - Using System Message"
echo "------------------------------------------"
curl -s -X POST "$API_URL/v1/chat/completions" \
  -H "Content-Type: application/json" \
  -d '{
    "model": "openelm-450m-instruct",
    "messages": [
      {
        "role": "system",
        "content": "You are a helpful coding assistant."
      },
      {
        "role": "user",
        "content": "What is a decorator?"
      }
    ],
    "max_tokens": 200,
    "temperature": 0.8
  }' | python3 -m json.tool
echo ""
# Example 6: OpenAI - Deterministic Generation
# temperature 0.0 requests greedy (repeatable) decoding from the server.
echo "Example 6: OpenAI - Deterministic Generation"
echo "----------------------------------------------"
curl -s -X POST "$API_URL/v1/chat/completions" \
  -H "Content-Type: application/json" \
  -d '{
    "model": "openelm-450m-instruct",
    "messages": [
      {
        "role": "user",
        "content": "What is 2 + 2?"
      }
    ],
    "max_tokens": 50,
    "temperature": 0.0
  }' | python3 -m json.tool
echo ""
# Example 7: OpenAI - Streaming Response
# "stream": true returns server-sent events (SSE); output is raw event
# lines rather than a single JSON document, so it is NOT piped through
# json.tool. head -20 truncates the stream to keep the demo short.
echo "Example 7: OpenAI - Streaming Response"
echo "----------------------------------------"
echo "Streaming output:"
curl -s -X POST "$API_URL/v1/chat/completions" \
  -H "Content-Type: application/json" \
  -H "Accept: text/event-stream" \
  -d '{
    "model": "openelm-450m-instruct",
    "messages": [
      {
        "role": "user",
        "content": "Count to 3, one per line."
      }
    ],
    "max_tokens": 100,
    "temperature": 0.7,
    "stream": true
  }' | head -20
echo ""
echo ""
# ============================================
# Anthropic API Examples
# ============================================
echo "##########################################"
echo "# Anthropic API Examples #"
echo "##########################################"
echo ""

# Example 8: Anthropic - List Available Models
# NOTE(review): this hits the same /v1/models endpoint as Example 2;
# the server appears to share one model listing across both API styles.
echo "Example 8: Anthropic - List Available Models"
echo "----------------------------------------------"
curl -s "$API_URL/v1/models" | python3 -m json.tool
echo ""
# Example 9: Anthropic - Basic Message Generation
# POST /v1/messages mirrors the Anthropic Messages API shape.
echo "Example 9: Anthropic - Basic Message Generation"
echo "-------------------------------------------------"
curl -s -X POST "$API_URL/v1/messages" \
  -H "Content-Type: application/json" \
  -d '{
    "model": "openelm-450m-instruct",
    "messages": [
      {
        "role": "user",
        "content": "Say hello in a friendly way!"
      }
    ],
    "max_tokens": 100,
    "temperature": 0.7
  }' | python3 -m json.tool
echo ""
# Example 10: Anthropic - Multi-turn Conversation
# Alternating user/assistant turns provide context, as in Example 4 but
# against the Anthropic-style /v1/messages endpoint.
echo "Example 10: Anthropic - Multi-turn Conversation"
echo "-------------------------------------------------"
curl -s -X POST "$API_URL/v1/messages" \
  -H "Content-Type: application/json" \
  -d '{
    "model": "openelm-450m-instruct",
    "messages": [
      {
        "role": "user",
        "content": "What is AI?"
      },
      {
        "role": "assistant",
        "content": "AI stands for Artificial Intelligence."
      },
      {
        "role": "user",
        "content": "Tell me more."
      }
    ],
    "max_tokens": 150,
    "temperature": 0.5
  }' | python3 -m json.tool
echo ""
# Example 11: Anthropic - Using System Prompt
# In the Anthropic format the system prompt is a top-level "system"
# field, not a message with role "system" (contrast with Example 5).
echo "Example 11: Anthropic - Using System Prompt"
echo "----------------------------------------------"
curl -s -X POST "$API_URL/v1/messages" \
  -H "Content-Type: application/json" \
  -d '{
    "model": "openelm-450m-instruct",
    "messages": [
      {
        "role": "user",
        "content": "Explain quantum computing."
      }
    ],
    "system": "You are a science educator who explains complex topics simply.",
    "max_tokens": 200,
    "temperature": 0.8
  }' | python3 -m json.tool
echo ""

echo "=============================================="
echo "All curl examples completed!"
echo "=============================================="