File size: 5,673 Bytes
dc893fb
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
"""Example: Using LLMClient with different providers.

This example demonstrates how to use the LLMClient wrapper with different
LLM providers (Anthropic or OpenAI) through the provider parameter.
"""

import asyncio
import os
from pathlib import Path

import yaml

from mini_agent import LLMClient, LLMProvider, Message


async def demo_anthropic_provider():
    """Demo using LLMClient with Anthropic provider."""
    print("\n" + "=" * 60)
    print("DEMO: LLMClient with Anthropic Provider")
    print("=" * 60)

    # Parse the example's YAML config (expects at least an "api_key" entry).
    cfg = yaml.safe_load(
        Path("mini_agent/config/config.yaml").read_text(encoding="utf-8")
    )

    # Build a client explicitly pinned to the Anthropic-style API.
    llm = LLMClient(
        api_key=cfg["api_key"],
        provider=LLMProvider.ANTHROPIC,  # Specify Anthropic provider
        model=cfg.get("model", "MiniMax-M2.1"),
    )

    print(f"Provider: {llm.provider}")
    print(f"API Base: {llm.api_base}")

    # Send a single-turn prompt and echo it for the reader.
    prompt = Message(role="user", content="Say 'Hello from Anthropic!'")
    print(f"\nπŸ‘€ User: {prompt.content}")

    try:
        reply = await llm.generate([prompt])
        # Surface the model's reasoning trace when the provider returns one.
        if reply.thinking:
            print(f"πŸ’­ Thinking: {reply.thinking}")
        print(f"πŸ’¬ Model: {reply.content}")
        print("βœ… Anthropic provider demo completed")
    except Exception as e:
        # Example script: report the failure instead of crashing the demo run.
        print(f"❌ Error: {e}")


async def demo_openai_provider():
    """Demo using LLMClient with OpenAI provider."""
    print("\n" + "=" * 60)
    print("DEMO: LLMClient with OpenAI Provider")
    print("=" * 60)

    # Parse the example's YAML config (expects at least an "api_key" entry).
    cfg = yaml.safe_load(
        Path("mini_agent/config/config.yaml").read_text(encoding="utf-8")
    )

    # Build a client explicitly pinned to the OpenAI-style API.
    llm = LLMClient(
        api_key=cfg["api_key"],
        provider=LLMProvider.OPENAI,  # Specify OpenAI provider
        model=cfg.get("model", "MiniMax-M2.1"),
    )

    print(f"Provider: {llm.provider}")
    print(f"API Base: {llm.api_base}")

    # Send a single-turn prompt and echo it for the reader.
    prompt = Message(role="user", content="Say 'Hello from OpenAI!'")
    print(f"\nπŸ‘€ User: {prompt.content}")

    try:
        reply = await llm.generate([prompt])
        # Surface the model's reasoning trace when the provider returns one.
        if reply.thinking:
            print(f"πŸ’­ Thinking: {reply.thinking}")
        print(f"πŸ’¬ Model: {reply.content}")
        print("βœ… OpenAI provider demo completed")
    except Exception as e:
        # Example script: report the failure instead of crashing the demo run.
        print(f"❌ Error: {e}")


async def demo_default_provider():
    """Demo using LLMClient with default provider.

    Identical to the explicit-provider demos except that no ``provider``
    argument is passed, exercising the client's default (Anthropic).
    Prints the response (and its thinking trace, if any) to stdout.
    """
    print("\n" + "=" * 60)
    print("DEMO: LLMClient with Default Provider (Anthropic)")
    print("=" * 60)

    # Load config (expects at least an "api_key" entry).
    config_path = Path("mini_agent/config/config.yaml")
    with open(config_path, encoding="utf-8") as f:
        config = yaml.safe_load(f)

    # Initialize client without specifying provider (defaults to Anthropic).
    client = LLMClient(
        api_key=config["api_key"],
        model=config.get("model", "MiniMax-M2.1"),
    )

    print(f"Provider (default): {client.provider}")
    print(f"API Base: {client.api_base}")

    # Simple single-turn question, echoed for the reader.
    messages = [Message(role="user", content="Say 'Hello with default provider!'")]
    print(f"\nπŸ‘€ User: {messages[0].content}")

    try:
        response = await client.generate(messages)
        # Consistency fix: the sibling demos print the thinking trace when
        # present; this demo previously dropped it silently.
        if response.thinking:
            print(f"πŸ’­ Thinking: {response.thinking}")
        print(f"πŸ’¬ Model: {response.content}")
        print("βœ… Default provider demo completed")
    except Exception as e:
        # Example script: report the failure instead of crashing the demo run.
        print(f"❌ Error: {e}")


async def demo_provider_comparison():
    """Compare responses from both providers."""
    print("\n" + "=" * 60)
    print("DEMO: Provider Comparison")
    print("=" * 60)

    # Parse the shared YAML config once for both clients.
    cfg = yaml.safe_load(
        Path("mini_agent/config/config.yaml").read_text(encoding="utf-8")
    )
    model_name = cfg.get("model", "MiniMax-M2.1")

    # One client per provider, identical credentials and model.
    anthropic_client = LLMClient(
        api_key=cfg["api_key"],
        provider=LLMProvider.ANTHROPIC,
        model=model_name,
    )
    openai_client = LLMClient(
        api_key=cfg["api_key"],
        provider=LLMProvider.OPENAI,
        model=model_name,
    )

    # Ask both providers the same single-turn question.
    question = [Message(role="user", content="What is 2+2?")]
    print(f"\nπŸ‘€ Question: {question[0].content}\n")

    try:
        anthropic_reply = await anthropic_client.generate(question)
        print(f"πŸ”΅ Anthropic: {anthropic_reply.content}")

        openai_reply = await openai_client.generate(question)
        print(f"🟒 OpenAI: {openai_reply.content}")

        print("\nβœ… Provider comparison completed")
    except Exception as e:
        # Example script: report the failure instead of crashing the demo run.
        print(f"❌ Error: {e}")


async def main():
    """Run all demos."""
    print("\nπŸš€ LLM Provider Selection Demo")
    print("This demo shows how to use LLMClient with different providers.")
    print("Make sure you have configured API key in config.yaml.")

    # Run the demos sequentially: default first, then explicit providers,
    # finishing with the side-by-side comparison.
    demos = (
        demo_default_provider,
        demo_anthropic_provider,
        demo_openai_provider,
        demo_provider_comparison,
    )

    try:
        for demo in demos:
            await demo()

        print("\nβœ… All demos completed successfully!")

    except Exception as e:
        # Top-level boundary: report and dump the traceback for debugging.
        print(f"\n❌ Error: {e}")
        import traceback

        traceback.print_exc()


# Entry point: run every demo inside a fresh asyncio event loop when the
# file is executed as a script (not when imported).
if __name__ == "__main__":
    asyncio.run(main())