junli16 committed on
Commit
3cfffe7
·
verified ·
1 Parent(s): 44a5542

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +38 -0
app.py CHANGED
@@ -4,6 +4,43 @@ GAIA Smart Agent
4
  智能搜索和文件处理工具,支持LLM
5
  """
6
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
7
  import os
8
  import asyncio
9
  import aiohttp
@@ -131,6 +168,7 @@ class DashScopeLLM:
131
  self.available = False
132
  else:
133
  print("[WARN] DashScope LLM not available - using fallback mode")
 
134
 
135
  # 初始化LLM速率限制器
136
  self._rate_limiter = []
 
4
  智能搜索和文件处理工具,支持LLM
5
  """
6
 
7
+ import os
8
+ from openai import OpenAI
9
+
10
+ client = OpenAI(
11
+ # 使用你提供的API Key
12
+ api_key="sk-757ddbeab14343dd80d270c418e7ed74",
13
+ base_url="https://dashscope.aliyuncs.com/compatible-mode/v1",
14
+ )
15
+ completion = client.chat.completions.create(
16
+ model="qwen3-max",
17
+ messages=[
18
+ {"role": "system", "content": "You are a helpful assistant."},
19
+ {"role": "user", "content": "你是谁?"},
20
+ ],
21
+ stream=True
22
+ )
23
+ for chunk in completion:
24
+ # 处理可能的空内容(避免NoneType错误)
25
+ if chunk.choices[0].delta.content:
26
+ print(chunk.choices[0].delta.content, end="", flush=True)
27
+
28
+
29
+
30
+
31
+
32
+
33
+
34
+
35
+
36
+
37
+
38
+
39
+
40
+
41
+
42
+
43
+
44
  import os
45
  import asyncio
46
  import aiohttp
 
168
  self.available = False
169
  else:
170
  print("[WARN] DashScope LLM not available - using fallback mode")
171
+
172
 
173
  # 初始化LLM速率限制器
174
  self._rate_limiter = []