"""Query the InternLM chat API (OpenAI-compatible endpoint) and print the reply.

NOTE(review): the original script hard-coded a bearer token (JWT) directly in
source. That token should be considered compromised and rotated; the key is
now read from the INTERNLM_API_KEY environment variable instead.
"""
import os

from openai import OpenAI

# NOTE(review): placeholder kept from the original script ("assume llama_index
# is an available library"). The real llama_index package does not export a
# `LlamaIndex` class — confirm the intended API before relying on this.
from llama_index import LlamaIndex

base_url = "https://internlm-chat.intern-ai.org.cn/puyu/api/v1/"
# SECURITY: never commit API keys to source control; read from the environment.
# Raises KeyError with a clear name if the variable is unset.
api_key = os.environ["INTERNLM_API_KEY"]
model = "internlm2.5-latest"

# Alternative endpoint (SiliconFlow), kept from the original for reference:
# base_url = "https://api.siliconflow.cn/v1"
# api_key = "sk-<fill in the correct token!>"
# model = "internlm/internlm2_5-7b-chat"

client = OpenAI(
    api_key=api_key,
    base_url=base_url,
)

# Hypothetical LlamaIndex wrapper for indexing/querying (original placeholder).
llama_index = LlamaIndex()

# Single-turn chat completion; the prompt string is user-facing and unchanged.
chat_rsp = client.chat.completions.create(
    model=model,
    messages=[{"role": "user", "content": "xtuner是什么?"}],
)

for choice in chat_rsp.choices:
    print(choice.message.content)

# Placeholder query through LlamaIndex, as in the original script.
result = llama_index.query("xtuner是什么?")
print(result)