|
|
import json |
|
|
import traceback |
|
|
import uuid |
|
|
from datetime import datetime |
|
|
|
|
|
import fastapi |
|
|
import httpx |
|
|
import time |
|
|
from fastapi import FastAPI, BackgroundTasks, Request |
|
|
from starlette.responses import StreamingResponse, Response |
|
|
|
|
|
# ASGI application exposing OpenAI-compatible /v1 endpoints.
app = FastAPI()

# Basic-auth token forwarded to the 139 AI upstream.
# NOTE(review): this module-level global is overwritten from each request's
# Authorization header in chat_endpoint, so concurrent requests carrying
# different tokens can race — confirm this service is single-user.
token = ''
|
|
async def make_request_to_139_ai_streaming_chat(user_message):
    """Stream a chat reply from 139 AI as OpenAI-style SSE chunks.

    Sends *user_message* to the 139 AI assistant endpoint and re-emits the
    upstream SSE stream as OpenAI ``chat.completion.chunk`` events, finishing
    with a stop chunk and the ``[DONE]`` sentinel.

    Args:
        user_message: The user's prompt text forwarded upstream.

    Yields:
        ``"data: {...}\\n\\n"`` strings in the OpenAI streaming format.

    Raises:
        httpx.HTTPStatusError: If the upstream service returns an error status.
    """
    url = "https://ai.yun.139.com/api/outer/assistant/chat/add"

    headers = {
        "sec-ch-ua-platform": "\"Windows\"",
        "authorization": f"Basic {token}",
        "x-yun-client-info": "4g||30|||||||1685/948|zh-CN||||",
        "sec-ch-ua": "\"Microsoft Edge\";v=\"135\", \"Not-A.Brand\";v=\"8\", \"Chromium\";v=\"135\"",
        "sec-ch-ua-mobile": "?0",
        "x-yun-api-version": "v2",
        "x-yun-app-channel": "10175",
        "accept": "text/event-stream",
        "dnt": "1",
        "content-type": "application/json",
        "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/135.0.0.0 Safari/537.36 Edg/135.0.0.0",
        "origin": "https://appmail.mail.10086.cn",
        "sec-fetch-site": "cross-site",
        "sec-fetch-mode": "cors",
        "sec-fetch-dest": "empty",
        "referer": "https://appmail.mail.10086.cn/",
        "accept-language": "zh-CN,zh;q=0.9,en-US;q=0.8,en;q=0.7,en-GB;q=0.6",
        "priority": "u=1, i"
    }

    payload = {
        "applicationId": "3000000002",
        "applicationType": "intelligent",
        "sessionId": "",
        "content": {
            "dialogue": user_message,
            "prompt": "",
            "timestamp": datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
            "commands": "000",
            "resourceType": "0",
            "resourceId": "",
            "dialogueType": "0",
            "sourceChannel": "10175",
            "commandType": "2",
            "extInfo": "{\"h5Version\":\"1.5.1\"}"
        }
    }

    async with httpx.AsyncClient() as client:
        async with client.stream(
            "POST",
            url,
            headers=headers,
            json=payload,
            timeout=30.0
        ) as response:
            response.raise_for_status()
            response_id = f"chatcmpl-{uuid.uuid4()}"
            async for line in response.aiter_lines():
                if not line.startswith('data:'):
                    continue
                raw = line[5:].strip()
                # Skip blank keep-alives and non-JSON sentinel lines instead of
                # letting json.loads abort the whole stream.
                if not raw:
                    continue
                try:
                    data_json = json.loads(raw)
                except json.JSONDecodeError:
                    continue
                if 'data' in data_json and 'flowResult' in data_json['data'] and 'outContent' in data_json['data']['flowResult']:
                    delta_text = data_json['data']['flowResult']['outContent']
                    chunk = {
                        "id": response_id,
                        "object": "chat.completion.chunk",
                        "created": int(time.time()),
                        "model": "DeepSeek-R1",
                        "choices": [
                            {
                                "index": 0,
                                "delta": {
                                    "content": delta_text
                                },
                                "finish_reason": None
                            }
                        ]
                    }
                    yield f"data: {json.dumps(chunk)}\n\n"

            # Terminate the OpenAI-style stream: empty delta with a stop
            # reason, then the [DONE] sentinel clients key off.
            final_chunk = {
                "id": response_id,
                "object": "chat.completion.chunk",
                "created": int(time.time()),
                "model": "DeepSeek-R1",
                "choices": [
                    {
                        "index": 0,
                        "delta": {},
                        "finish_reason": "stop"
                    }
                ]
            }
            yield f"data: {json.dumps(final_chunk)}\n\n"
            yield "data: [DONE]\n\n"
|
|
|
|
|
async def make_request_to_139_ai_non_streaming_chat(user_message):
    """Send one chat request to 139 AI and collect the complete reply.

    Consumes the upstream SSE stream to the end, concatenating every
    ``outContent`` fragment, and returns a single OpenAI ``chat.completion``
    response dict.

    Args:
        user_message: The user's prompt text forwarded upstream.

    Returns:
        dict: An OpenAI-compatible chat.completion response.

    Raises:
        httpx.HTTPStatusError: If the upstream service returns an error status.
    """
    url = "https://ai.yun.139.com/api/outer/assistant/chat/add"

    headers = {
        "sec-ch-ua-platform": "\"Windows\"",
        "authorization": f"Basic {token}",
        "x-yun-client-info": "4g||30|||||||1685/948|zh-CN||||",
        "sec-ch-ua": "\"Microsoft Edge\";v=\"135\", \"Not-A.Brand\";v=\"8\", \"Chromium\";v=\"135\"",
        "sec-ch-ua-mobile": "?0",
        "x-yun-api-version": "v2",
        "x-yun-app-channel": "10175",
        "dnt": "1",
        "content-type": "application/json",
        "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/135.0.0.0 Safari/537.36 Edg/135.0.0.0",
        "origin": "https://appmail.mail.10086.cn",
        "sec-fetch-site": "cross-site",
        "sec-fetch-mode": "cors",
        "sec-fetch-dest": "empty",
        "referer": "https://appmail.mail.10086.cn/",
        "accept-language": "zh-CN,zh;q=0.9,en-US;q=0.8,en;q=0.7,en-GB;q=0.6",
        "priority": "u=1, i"
    }

    payload = {
        "applicationId": "3000000002",
        "applicationType": "intelligent",
        "sessionId": "",
        "content": {
            "dialogue": user_message,
            "prompt": "",
            "timestamp": datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
            "commands": "000",
            "resourceType": "0",
            "resourceId": "",
            "dialogueType": "0",
            "sourceChannel": "10175",
            "commandType": "2",
            "extInfo": "{\"h5Version\":\"1.5.1\"}"
        }
    }

    async with httpx.AsyncClient() as client:
        async with client.stream(
            "POST",
            url,
            headers=headers,
            json=payload,
            timeout=30.0
        ) as response:
            response.raise_for_status()
            full_content = ""
            # Fallback model label; updated whenever upstream reports one.
            # (Previously the loop variable was read after the loop, raising
            # NameError when the stream carried no data lines.)
            model_name = "DeepSeek-R1"
            async for line in response.aiter_lines():
                if not line.startswith('data:'):
                    continue
                raw = line[5:].strip()
                # Skip blank keep-alives and non-JSON lines rather than abort.
                if not raw:
                    continue
                try:
                    data_json = json.loads(raw)
                except json.JSONDecodeError:
                    continue
                model_name = data_json.get('modelType', model_name)
                if 'data' in data_json and 'flowResult' in data_json['data'] and 'outContent' in data_json['data']['flowResult']:
                    full_content += data_json['data']['flowResult']['outContent']

            response_data = {
                "id": f"chatcmpl-{uuid.uuid4()}",
                "object": "chat.completion",
                "created": int(time.time()),
                "model": model_name,
                "choices": [
                    {
                        "index": 0,
                        "message": {"role": "assistant", "content": full_content},
                        "finish_reason": "stop"
                    }
                ],
                # Character counts stand in for token counts — the upstream
                # API does not report real usage.
                "usage":
                {
                    "prompt_tokens": len(user_message),
                    "completion_tokens": len(full_content),
                    "total_tokens": len(user_message) + len(full_content)
                }
            }
            return response_data
|
|
|
|
|
@app.post("/v1/chat/completions")
async def chat_endpoint(request: Request, background_tasks: BackgroundTasks):
    """OpenAI-compatible chat endpoint proxying to 139 AI.

    Extracts the Bearer token and the latest user message from the request,
    then dispatches to the streaming or non-streaming upstream helper
    depending on the ``stream`` flag in the request body.

    Raises:
        fastapi.HTTPException: 401 when the Authorization header is not a
            Bearer token; 400 when no user message is present.
    """
    try:
        auth_header = request.headers.get('Authorization', '')

        if auth_header.startswith('Bearer '):
            # NOTE(review): mutating a module-level global per request means
            # concurrent requests with different tokens can race.
            global token
            token = auth_header[7:]
        else:
            raise fastapi.HTTPException(status_code=401, detail="Authorization header must be a Bearer token")

        # Use the most recent user-role message as the prompt.
        user_message = ""
        request_body = await request.json()
        for msg in reversed(request_body.get('messages', [])):
            if msg.get('role') == 'user':
                user_message = msg.get('content', '')
                break
        if not user_message:
            raise fastapi.HTTPException(status_code=400, detail="No user message found")
        print(f"user input: {user_message}")

        if request_body.get('stream'):
            return StreamingResponse(
                content=make_request_to_139_ai_streaming_chat(user_message),
                media_type="text/event-stream",
                headers={
                    "Cache-Control": "no-cache",
                    "Connection": "keep-alive",
                    "Transfer-Encoding": "chunked"
                }
            )
        else:
            response_data = await make_request_to_139_ai_non_streaming_chat(user_message)
            return Response(
                content=json.dumps(response_data),
                media_type="application/json",
                status_code=200,
            )
    except fastapi.HTTPException:
        # Let FastAPI turn these into proper 401/400 responses; previously
        # they fell into the generic handler below and came back as 200 OK.
        raise
    except Exception as e:
        traceback.print_exc()
        return {"status": "error", "message": str(e)}
|
|
|
|
|
@app.get('/v1/models')
async def list_models():
    """Return the OpenAI-style model listing exposed by this proxy."""
    model_entry = {
        "id": "DeepSeek-R1",
        "object": "model",
        "created": int(time.time()),
        "owned_by": "139.com",
    }
    return {"object": "list", "data": [model_entry]}
|
|
|
|
|
if __name__ == "__main__":
    # Run the proxy with uvicorn when invoked directly.
    # NOTE(review): binds to all interfaces (0.0.0.0) on port 7860 — confirm
    # that exposure is intended for the deployment environment.
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=7860)
|
|
|