Spaces:
Sleeping
Sleeping
Upload 46 files
Browse files- openai_compat.py +30 -0
- proto/attachment.proto +57 -0
- proto/citations.proto +20 -0
- proto/debug.proto +12 -0
- proto/file_content.proto +18 -0
- proto/input_context.proto +64 -0
- proto/options.proto +12 -0
- proto/request.proto +173 -0
- proto/response.proto +159 -0
- proto/suggestions.proto +22 -0
- proto/task.proto +503 -0
- proto/todo.proto +23 -0
- protobuf2openai/__init__.py +3 -0
- protobuf2openai/app.py +49 -0
- protobuf2openai/bridge.py +105 -0
- protobuf2openai/config.py +14 -0
- protobuf2openai/helpers.py +54 -0
- protobuf2openai/logging.py +32 -0
- protobuf2openai/models.py +31 -0
- protobuf2openai/packets.py +137 -0
- protobuf2openai/reorder.py +96 -0
- protobuf2openai/router.py +219 -0
- protobuf2openai/sse_transform.py +345 -0
- protobuf2openai/state.py +23 -0
- pyproject.toml +25 -0
- server.py +585 -0
- start.py +38 -0
- uv.lock +863 -0
- warp2protobuf/__init__.py +4 -0
- warp2protobuf/api/__init__.py +3 -0
- warp2protobuf/api/protobuf_routes.py +623 -0
- warp2protobuf/config/__init__.py +3 -0
- warp2protobuf/config/models.py +328 -0
- warp2protobuf/config/settings.py +43 -0
- warp2protobuf/core/__init__.py +3 -0
- warp2protobuf/core/auth.py +341 -0
- warp2protobuf/core/logging.py +121 -0
- warp2protobuf/core/protobuf.py +284 -0
- warp2protobuf/core/protobuf_utils.py +310 -0
- warp2protobuf/core/schema_sanitizer.py +175 -0
- warp2protobuf/core/server_message_data.py +189 -0
- warp2protobuf/core/session.py +133 -0
- warp2protobuf/core/stream_processor.py +334 -0
- warp2protobuf/warp/__init__.py +2 -0
- warp2protobuf/warp/api_client.py +426 -0
- warp2protobuf/warp/response.py +204 -0
openai_compat.py
ADDED
|
@@ -0,0 +1,30 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/env python3
|
| 2 |
+
# -*- coding: utf-8 -*-
|
| 3 |
+
"""
|
| 4 |
+
OpenAI Chat Completions compatible server (system-prompt flavored)
|
| 5 |
+
|
| 6 |
+
Startup entrypoint that exposes the modular app implemented in protobuf2openai.
|
| 7 |
+
"""
|
| 8 |
+
|
| 9 |
+
from __future__ import annotations
|
| 10 |
+
|
| 11 |
+
import os
|
| 12 |
+
import asyncio
|
| 13 |
+
|
| 14 |
+
from protobuf2openai.app import app # FastAPI app
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
if __name__ == "__main__":
|
| 18 |
+
import uvicorn
|
| 19 |
+
# Refresh JWT on startup before running the server
|
| 20 |
+
try:
|
| 21 |
+
from warp2protobuf.core.auth import refresh_jwt_if_needed as _refresh_jwt
|
| 22 |
+
asyncio.run(_refresh_jwt())
|
| 23 |
+
except Exception:
|
| 24 |
+
pass
|
| 25 |
+
uvicorn.run(
|
| 26 |
+
app,
|
| 27 |
+
host=os.getenv("HOST", "127.0.0.1"),
|
| 28 |
+
port=int(os.getenv("PORT", "8010")),
|
| 29 |
+
log_level="info",
|
| 30 |
+
)
|
proto/attachment.proto
ADDED
|
@@ -0,0 +1,57 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
syntax = "proto3";
|
| 2 |
+
|
| 3 |
+
package warp.multi_agent.v1;
|
| 4 |
+
|
| 5 |
+
import "options.proto";
|
| 6 |
+
|
| 7 |
+
option go_package = "github.com/warp/warp-proto-apis/multi_agent/v1";
|
| 8 |
+
|
| 9 |
+
message Attachment {
|
| 10 |
+
oneof value {
|
| 11 |
+
string plain_text = 1;
|
| 12 |
+
ExecutedShellCommand executed_shell_command = 2;
|
| 13 |
+
RunningShellCommand running_shell_command = 3;
|
| 14 |
+
DriveObject drive_object = 4;
|
| 15 |
+
}
|
| 16 |
+
}
|
| 17 |
+
|
| 18 |
+
message ExecutedShellCommand {
|
| 19 |
+
string command = 1;
|
| 20 |
+
string output = 2;
|
| 21 |
+
int32 exit_code = 3;
|
| 22 |
+
}
|
| 23 |
+
|
| 24 |
+
message RunningShellCommand {
|
| 25 |
+
string command = 1;
|
| 26 |
+
LongRunningShellCommandSnapshot snapshot = 2;
|
| 27 |
+
}
|
| 28 |
+
|
| 29 |
+
message LongRunningShellCommandSnapshot {
|
| 30 |
+
string output = 1;
|
| 31 |
+
}
|
| 32 |
+
|
| 33 |
+
message DriveObject {
|
| 34 |
+
string uid = 1;
|
| 35 |
+
|
| 36 |
+
oneof object_payload {
|
| 37 |
+
Workflow workflow = 2;
|
| 38 |
+
Notebook notebook = 3;
|
| 39 |
+
GenericStringObject generic_string_object = 4;
|
| 40 |
+
}
|
| 41 |
+
}
|
| 42 |
+
|
| 43 |
+
message Workflow {
|
| 44 |
+
string name = 1;
|
| 45 |
+
string description = 2;
|
| 46 |
+
string command = 3;
|
| 47 |
+
}
|
| 48 |
+
|
| 49 |
+
message Notebook {
|
| 50 |
+
string title = 1;
|
| 51 |
+
string content = 2;
|
| 52 |
+
}
|
| 53 |
+
|
| 54 |
+
message GenericStringObject {
|
| 55 |
+
string payload = 1;
|
| 56 |
+
string object_type = 2;
|
| 57 |
+
}
|
proto/citations.proto
ADDED
|
@@ -0,0 +1,20 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
syntax = "proto3";
|
| 2 |
+
|
| 3 |
+
package warp.multi_agent.v1;
|
| 4 |
+
|
| 5 |
+
option go_package = "github.com/warp/warp-proto-apis/multi_agent/v1";
|
| 6 |
+
|
| 7 |
+
message Citation {
|
| 8 |
+
string document_id = 1;
|
| 9 |
+
DocumentType document_type = 2;
|
| 10 |
+
}
|
| 11 |
+
|
| 12 |
+
enum DocumentType {
|
| 13 |
+
WARP_DRIVE_WORKFLOW = 0;
|
| 14 |
+
WARP_DRIVE_NOTEBOOK = 1;
|
| 15 |
+
WARP_DRIVE_ENV_VAR = 2;
|
| 16 |
+
RULE = 3;
|
| 17 |
+
WARP_DOCUMENTATION = 4;
|
| 18 |
+
WEB_PAGE = 5;
|
| 19 |
+
UNKNOWN = 6;
|
| 20 |
+
}
|
proto/debug.proto
ADDED
|
@@ -0,0 +1,12 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
syntax = "proto3";
|
| 2 |
+
|
| 3 |
+
package warp.multi_agent.v1;
|
| 4 |
+
|
| 5 |
+
import "task.proto";
|
| 6 |
+
|
| 7 |
+
option go_package = "github.com/warp/warp-proto-apis/multi_agent/v1";
|
| 8 |
+
|
| 9 |
+
message TaskList {
|
| 10 |
+
repeated Task tasks = 1;
|
| 11 |
+
repeated string ordered_message_ids = 2;
|
| 12 |
+
}
|
proto/file_content.proto
ADDED
|
@@ -0,0 +1,18 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
syntax = "proto3";
|
| 2 |
+
|
| 3 |
+
package warp.multi_agent.v1;
|
| 4 |
+
|
| 5 |
+
import "options.proto";
|
| 6 |
+
|
| 7 |
+
option go_package = "github.com/warp/warp-proto-apis/multi_agent/v1";
|
| 8 |
+
|
| 9 |
+
message FileContentLineRange {
|
| 10 |
+
uint32 start = 1;
|
| 11 |
+
uint32 end = 2;
|
| 12 |
+
}
|
| 13 |
+
|
| 14 |
+
message FileContent {
|
| 15 |
+
string file_path = 1;
|
| 16 |
+
string content = 2;
|
| 17 |
+
FileContentLineRange line_range = 3;
|
| 18 |
+
}
|
proto/input_context.proto
ADDED
|
@@ -0,0 +1,64 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
syntax = "proto3";
|
| 2 |
+
|
| 3 |
+
package warp.multi_agent.v1;
|
| 4 |
+
|
| 5 |
+
import "google/protobuf/timestamp.proto";
|
| 6 |
+
import "file_content.proto";
|
| 7 |
+
import "attachment.proto";
|
| 8 |
+
import "options.proto";
|
| 9 |
+
|
| 10 |
+
option go_package = "github.com/warp/warp-proto-apis/multi_agent/v1";
|
| 11 |
+
|
| 12 |
+
message InputContext {
|
| 13 |
+
Directory directory = 1;
|
| 14 |
+
message Directory {
|
| 15 |
+
string pwd = 1;
|
| 16 |
+
string home = 2;
|
| 17 |
+
bool pwd_file_symbols_indexed = 3;
|
| 18 |
+
}
|
| 19 |
+
|
| 20 |
+
OperatingSystem operating_system = 2;
|
| 21 |
+
message OperatingSystem {
|
| 22 |
+
string platform = 1;
|
| 23 |
+
string distribution = 2;
|
| 24 |
+
}
|
| 25 |
+
|
| 26 |
+
Shell shell = 3;
|
| 27 |
+
message Shell {
|
| 28 |
+
string name = 1;
|
| 29 |
+
string version = 2;
|
| 30 |
+
}
|
| 31 |
+
|
| 32 |
+
google.protobuf.Timestamp current_time = 4;
|
| 33 |
+
|
| 34 |
+
repeated Codebase codebases = 8;
|
| 35 |
+
message Codebase {
|
| 36 |
+
string name = 1;
|
| 37 |
+
string path = 2;
|
| 38 |
+
}
|
| 39 |
+
|
| 40 |
+
repeated ProjectRules project_rules = 10;
|
| 41 |
+
message ProjectRules {
|
| 42 |
+
string root_path = 1;
|
| 43 |
+
repeated FileContent active_rule_files = 2;
|
| 44 |
+
repeated string additional_rule_file_paths = 3;
|
| 45 |
+
}
|
| 46 |
+
|
| 47 |
+
repeated ExecutedShellCommand executed_shell_commands = 5 [deprecated = true];
|
| 48 |
+
|
| 49 |
+
repeated SelectedText selected_text = 6;
|
| 50 |
+
message SelectedText {
|
| 51 |
+
string text = 1;
|
| 52 |
+
}
|
| 53 |
+
|
| 54 |
+
repeated Image images = 7;
|
| 55 |
+
message Image {
|
| 56 |
+
bytes data = 1;
|
| 57 |
+
string mime_type = 2;
|
| 58 |
+
}
|
| 59 |
+
|
| 60 |
+
repeated File files = 9;
|
| 61 |
+
message File {
|
| 62 |
+
FileContent content = 1;
|
| 63 |
+
}
|
| 64 |
+
}
|
proto/options.proto
ADDED
|
@@ -0,0 +1,12 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
syntax = "proto3";
|
| 2 |
+
|
| 3 |
+
package warp.multi_agent.v1;
|
| 4 |
+
|
| 5 |
+
import "google/protobuf/descriptor.proto";
|
| 6 |
+
|
| 7 |
+
option go_package = "github.com/warp/warp-proto-apis/multi_agent/v1";
|
| 8 |
+
|
| 9 |
+
extend google.protobuf.FieldOptions {
|
| 10 |
+
bool sensitive = 50000;
|
| 11 |
+
bool internal = 50001;
|
| 12 |
+
}
|
proto/request.proto
ADDED
|
@@ -0,0 +1,173 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
syntax = "proto3";
|
| 2 |
+
|
| 3 |
+
package warp.multi_agent.v1;
|
| 4 |
+
|
| 5 |
+
import "google/protobuf/struct.proto";
|
| 6 |
+
import "input_context.proto";
|
| 7 |
+
import "attachment.proto";
|
| 8 |
+
import "options.proto";
|
| 9 |
+
import "suggestions.proto";
|
| 10 |
+
import "task.proto";
|
| 11 |
+
|
| 12 |
+
option go_package = "github.com/warp/warp-proto-apis/multi_agent/v1";
|
| 13 |
+
|
| 14 |
+
message Request {
|
| 15 |
+
TaskContext task_context = 1;
|
| 16 |
+
message TaskContext {
|
| 17 |
+
repeated Task tasks = 1;
|
| 18 |
+
string active_task_id = 2;
|
| 19 |
+
}
|
| 20 |
+
|
| 21 |
+
Input input = 2;
|
| 22 |
+
message Input {
|
| 23 |
+
InputContext context = 1;
|
| 24 |
+
|
| 25 |
+
oneof type {
|
| 26 |
+
UserInputs user_inputs = 6;
|
| 27 |
+
QueryWithCannedResponse query_with_canned_response = 4;
|
| 28 |
+
AutoCodeDiffQuery auto_code_diff_query = 5;
|
| 29 |
+
ResumeConversation resume_conversation = 7;
|
| 30 |
+
InitProjectRules init_project_rules = 8;
|
| 31 |
+
UserQuery user_query = 2 [deprecated = true];
|
| 32 |
+
ToolCallResult tool_call_result = 3 [deprecated = true];
|
| 33 |
+
}
|
| 34 |
+
|
| 35 |
+
message UserQuery {
|
| 36 |
+
string query = 1;
|
| 37 |
+
map<string, Attachment> referenced_attachments = 2;
|
| 38 |
+
}
|
| 39 |
+
|
| 40 |
+
message UserInputs {
|
| 41 |
+
repeated UserInput inputs = 1;
|
| 42 |
+
message UserInput {
|
| 43 |
+
oneof input {
|
| 44 |
+
UserQuery user_query = 1;
|
| 45 |
+
ToolCallResult tool_call_result = 2;
|
| 46 |
+
}
|
| 47 |
+
}
|
| 48 |
+
}
|
| 49 |
+
|
| 50 |
+
message ToolCallResult {
|
| 51 |
+
string tool_call_id = 1;
|
| 52 |
+
|
| 53 |
+
oneof result {
|
| 54 |
+
RunShellCommandResult run_shell_command = 2;
|
| 55 |
+
ReadFilesResult read_files = 3;
|
| 56 |
+
SearchCodebaseResult search_codebase = 4;
|
| 57 |
+
ApplyFileDiffsResult apply_file_diffs = 5;
|
| 58 |
+
SuggestPlanResult suggest_plan = 6;
|
| 59 |
+
SuggestCreatePlanResult suggest_create_plan = 7;
|
| 60 |
+
GrepResult grep = 8;
|
| 61 |
+
FileGlobResult file_glob = 9;
|
| 62 |
+
RefineResult refine = 10;
|
| 63 |
+
ReadMCPResourceResult read_mcp_resource = 11;
|
| 64 |
+
CallMCPToolResult call_mcp_tool = 12;
|
| 65 |
+
WriteToLongRunningShellCommandResult write_to_long_running_shell_command = 13;
|
| 66 |
+
SuggestNewConversationResult suggest_new_conversation = 14;
|
| 67 |
+
FileGlobV2Result file_glob_v2 = 15;
|
| 68 |
+
}
|
| 69 |
+
|
| 70 |
+
message RefineResult {
|
| 71 |
+
UserQuery user_query = 1;
|
| 72 |
+
}
|
| 73 |
+
}
|
| 74 |
+
|
| 75 |
+
message QueryWithCannedResponse {
|
| 76 |
+
string query = 1;
|
| 77 |
+
|
| 78 |
+
oneof type {
|
| 79 |
+
Install install = 2;
|
| 80 |
+
Code code = 3;
|
| 81 |
+
Deploy deploy = 4;
|
| 82 |
+
SomethingElse something_else = 5;
|
| 83 |
+
CustomOnboardingRequest custom_onboarding_request = 6;
|
| 84 |
+
AgenticOnboardingKickoff agentic_onboarding_kickoff = 7;
|
| 85 |
+
}
|
| 86 |
+
|
| 87 |
+
message Install {
|
| 88 |
+
|
| 89 |
+
}
|
| 90 |
+
|
| 91 |
+
message Code {
|
| 92 |
+
|
| 93 |
+
}
|
| 94 |
+
|
| 95 |
+
message Deploy {
|
| 96 |
+
|
| 97 |
+
}
|
| 98 |
+
|
| 99 |
+
message SomethingElse {
|
| 100 |
+
|
| 101 |
+
}
|
| 102 |
+
|
| 103 |
+
message CustomOnboardingRequest {
|
| 104 |
+
|
| 105 |
+
}
|
| 106 |
+
|
| 107 |
+
message AgenticOnboardingKickoff {
|
| 108 |
+
|
| 109 |
+
}
|
| 110 |
+
}
|
| 111 |
+
|
| 112 |
+
message AutoCodeDiffQuery {
|
| 113 |
+
string query = 1;
|
| 114 |
+
}
|
| 115 |
+
|
| 116 |
+
message ResumeConversation {
|
| 117 |
+
|
| 118 |
+
}
|
| 119 |
+
|
| 120 |
+
message InitProjectRules {
|
| 121 |
+
|
| 122 |
+
}
|
| 123 |
+
}
|
| 124 |
+
|
| 125 |
+
Settings settings = 3;
|
| 126 |
+
message Settings {
|
| 127 |
+
ModelConfig model_config = 1;
|
| 128 |
+
message ModelConfig {
|
| 129 |
+
string base = 1;
|
| 130 |
+
string planning = 2;
|
| 131 |
+
string coding = 3;
|
| 132 |
+
}
|
| 133 |
+
|
| 134 |
+
bool rules_enabled = 2;
|
| 135 |
+
bool web_context_retrieval_enabled = 3;
|
| 136 |
+
bool supports_parallel_tool_calls = 4;
|
| 137 |
+
bool use_anthropic_text_editor_tools = 5;
|
| 138 |
+
bool planning_enabled = 6;
|
| 139 |
+
bool warp_drive_context_enabled = 7;
|
| 140 |
+
bool supports_create_files = 8;
|
| 141 |
+
repeated ToolType supported_tools = 9;
|
| 142 |
+
bool supports_long_running_commands = 10;
|
| 143 |
+
bool should_preserve_file_content_in_history = 11;
|
| 144 |
+
bool supports_todos_ui = 12;
|
| 145 |
+
bool supports_linked_code_blocks = 13;
|
| 146 |
+
}
|
| 147 |
+
|
| 148 |
+
Metadata metadata = 4;
|
| 149 |
+
message Metadata {
|
| 150 |
+
string conversation_id = 1;
|
| 151 |
+
map<string, google.protobuf.Value> logging = 2;
|
| 152 |
+
}
|
| 153 |
+
|
| 154 |
+
Suggestions existing_suggestions = 5;
|
| 155 |
+
|
| 156 |
+
MCPContext mcp_context = 6;
|
| 157 |
+
message MCPContext {
|
| 158 |
+
repeated MCPResource resources = 1;
|
| 159 |
+
message MCPResource {
|
| 160 |
+
string uri = 1;
|
| 161 |
+
string name = 2;
|
| 162 |
+
string description = 3;
|
| 163 |
+
string mime_type = 4;
|
| 164 |
+
}
|
| 165 |
+
|
| 166 |
+
repeated MCPTool tools = 2;
|
| 167 |
+
message MCPTool {
|
| 168 |
+
string name = 1;
|
| 169 |
+
string description = 2;
|
| 170 |
+
google.protobuf.Struct input_schema = 3;
|
| 171 |
+
}
|
| 172 |
+
}
|
| 173 |
+
}
|
proto/response.proto
ADDED
|
@@ -0,0 +1,159 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
syntax = "proto3";
|
| 2 |
+
|
| 3 |
+
package warp.multi_agent.v1;
|
| 4 |
+
|
| 5 |
+
import "google/protobuf/field_mask.proto";
|
| 6 |
+
import "options.proto";
|
| 7 |
+
import "suggestions.proto";
|
| 8 |
+
import "task.proto";
|
| 9 |
+
|
| 10 |
+
option go_package = "github.com/warp/warp-proto-apis/multi_agent/v1";
|
| 11 |
+
|
| 12 |
+
message ResponseEvent {
|
| 13 |
+
oneof type {
|
| 14 |
+
StreamInit init = 1;
|
| 15 |
+
ClientActions client_actions = 2;
|
| 16 |
+
StreamFinished finished = 3;
|
| 17 |
+
}
|
| 18 |
+
|
| 19 |
+
message StreamInit {
|
| 20 |
+
string conversation_id = 1;
|
| 21 |
+
string request_id = 2;
|
| 22 |
+
}
|
| 23 |
+
|
| 24 |
+
message ClientActions {
|
| 25 |
+
repeated ClientAction actions = 1;
|
| 26 |
+
}
|
| 27 |
+
|
| 28 |
+
message StreamFinished {
|
| 29 |
+
repeated TokenUsage token_usage = 8;
|
| 30 |
+
message TokenUsage {
|
| 31 |
+
string model_id = 1;
|
| 32 |
+
uint32 total_input = 2;
|
| 33 |
+
uint32 output = 3;
|
| 34 |
+
uint32 input_cache_read = 4;
|
| 35 |
+
uint32 input_cache_write = 5;
|
| 36 |
+
float cost_in_cents = 6;
|
| 37 |
+
}
|
| 38 |
+
|
| 39 |
+
bool should_refresh_model_config = 9;
|
| 40 |
+
|
| 41 |
+
RequestCost request_cost = 10;
|
| 42 |
+
message RequestCost {
|
| 43 |
+
float exact = 1;
|
| 44 |
+
}
|
| 45 |
+
|
| 46 |
+
ContextWindowInfo context_window_info = 11;
|
| 47 |
+
message ContextWindowInfo {
|
| 48 |
+
float context_window_usage = 1;
|
| 49 |
+
bool summarized = 2;
|
| 50 |
+
}
|
| 51 |
+
|
| 52 |
+
oneof reason {
|
| 53 |
+
Other other = 1;
|
| 54 |
+
Done done = 2;
|
| 55 |
+
ReachedMaxTokenLimit max_token_limit = 3;
|
| 56 |
+
QuotaLimit quota_limit = 4;
|
| 57 |
+
ContextWindowExceeded context_window_exceeded = 5;
|
| 58 |
+
LLMUnavailable llm_unavailable = 6;
|
| 59 |
+
InternalError internal_error = 7;
|
| 60 |
+
}
|
| 61 |
+
|
| 62 |
+
message Other {
|
| 63 |
+
|
| 64 |
+
}
|
| 65 |
+
|
| 66 |
+
message Done {
|
| 67 |
+
|
| 68 |
+
}
|
| 69 |
+
|
| 70 |
+
message ReachedMaxTokenLimit {
|
| 71 |
+
|
| 72 |
+
}
|
| 73 |
+
|
| 74 |
+
message QuotaLimit {
|
| 75 |
+
|
| 76 |
+
}
|
| 77 |
+
|
| 78 |
+
message ContextWindowExceeded {
|
| 79 |
+
|
| 80 |
+
}
|
| 81 |
+
|
| 82 |
+
message LLMUnavailable {
|
| 83 |
+
|
| 84 |
+
}
|
| 85 |
+
|
| 86 |
+
message InternalError {
|
| 87 |
+
string message = 1;
|
| 88 |
+
}
|
| 89 |
+
}
|
| 90 |
+
}
|
| 91 |
+
|
| 92 |
+
message ClientAction {
|
| 93 |
+
oneof action {
|
| 94 |
+
CreateTask create_task = 1;
|
| 95 |
+
UpdateTaskStatus update_task_status = 2;
|
| 96 |
+
AddMessagesToTask add_messages_to_task = 3;
|
| 97 |
+
UpdateTaskMessage update_task_message = 4;
|
| 98 |
+
AppendToMessageContent append_to_message_content = 5;
|
| 99 |
+
Suggestions show_suggestions = 6;
|
| 100 |
+
UpdateTaskSummary update_task_summary = 7;
|
| 101 |
+
UpdateTaskDescription update_task_description = 8;
|
| 102 |
+
BeginTransaction begin_transaction = 9;
|
| 103 |
+
CommitTransaction commit_transaction = 10;
|
| 104 |
+
RollbackTransaction rollback_transaction = 11;
|
| 105 |
+
StartNewConversation start_new_conversation = 12;
|
| 106 |
+
}
|
| 107 |
+
|
| 108 |
+
message CreateTask {
|
| 109 |
+
Task task = 1;
|
| 110 |
+
}
|
| 111 |
+
|
| 112 |
+
message UpdateTaskStatus {
|
| 113 |
+
string task_id = 1;
|
| 114 |
+
TaskStatus task_status = 2;
|
| 115 |
+
}
|
| 116 |
+
|
| 117 |
+
message UpdateTaskDescription {
|
| 118 |
+
string task_id = 1;
|
| 119 |
+
string description = 2;
|
| 120 |
+
}
|
| 121 |
+
|
| 122 |
+
message AddMessagesToTask {
|
| 123 |
+
string task_id = 1;
|
| 124 |
+
repeated Message messages = 2;
|
| 125 |
+
}
|
| 126 |
+
|
| 127 |
+
message UpdateTaskMessage {
|
| 128 |
+
string task_id = 3;
|
| 129 |
+
Message message = 1;
|
| 130 |
+
google.protobuf.FieldMask mask = 2;
|
| 131 |
+
}
|
| 132 |
+
|
| 133 |
+
message AppendToMessageContent {
|
| 134 |
+
string task_id = 3;
|
| 135 |
+
Message message = 1;
|
| 136 |
+
google.protobuf.FieldMask mask = 2;
|
| 137 |
+
}
|
| 138 |
+
|
| 139 |
+
message UpdateTaskSummary {
|
| 140 |
+
string task_id = 1;
|
| 141 |
+
string summary = 2;
|
| 142 |
+
}
|
| 143 |
+
|
| 144 |
+
message BeginTransaction {
|
| 145 |
+
|
| 146 |
+
}
|
| 147 |
+
|
| 148 |
+
message CommitTransaction {
|
| 149 |
+
|
| 150 |
+
}
|
| 151 |
+
|
| 152 |
+
message RollbackTransaction {
|
| 153 |
+
|
| 154 |
+
}
|
| 155 |
+
|
| 156 |
+
message StartNewConversation {
|
| 157 |
+
string start_from_message_id = 1;
|
| 158 |
+
}
|
| 159 |
+
}
|
proto/suggestions.proto
ADDED
|
@@ -0,0 +1,22 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
syntax = "proto3";
|
| 2 |
+
|
| 3 |
+
package warp.multi_agent.v1;
|
| 4 |
+
|
| 5 |
+
option go_package = "github.com/warp/warp-proto-apis/multi_agent/v1";
|
| 6 |
+
|
| 7 |
+
message Suggestions {
|
| 8 |
+
repeated SuggestedRule rules = 1;
|
| 9 |
+
repeated SuggestedAgentModeWorkflow workflows = 2;
|
| 10 |
+
}
|
| 11 |
+
|
| 12 |
+
message SuggestedRule {
|
| 13 |
+
string name = 1;
|
| 14 |
+
string content = 2;
|
| 15 |
+
string logging_id = 3;
|
| 16 |
+
}
|
| 17 |
+
|
| 18 |
+
message SuggestedAgentModeWorkflow {
|
| 19 |
+
string name = 1;
|
| 20 |
+
string prompt = 2;
|
| 21 |
+
string logging_id = 3;
|
| 22 |
+
}
|
proto/task.proto
ADDED
|
@@ -0,0 +1,503 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
syntax = "proto3";
|
| 2 |
+
|
| 3 |
+
package warp.multi_agent.v1;
|
| 4 |
+
|
| 5 |
+
import "google/protobuf/empty.proto";
|
| 6 |
+
import "google/protobuf/descriptor.proto";
|
| 7 |
+
import "google/protobuf/struct.proto";
|
| 8 |
+
import "citations.proto";
|
| 9 |
+
import "input_context.proto";
|
| 10 |
+
import "attachment.proto";
|
| 11 |
+
import "file_content.proto";
|
| 12 |
+
import "options.proto";
|
| 13 |
+
import "todo.proto";
|
| 14 |
+
|
| 15 |
+
option go_package = "github.com/warp/warp-proto-apis/multi_agent/v1";
|
| 16 |
+
|
| 17 |
+
message Task {
|
| 18 |
+
string id = 1;
|
| 19 |
+
string description = 2;
|
| 20 |
+
|
| 21 |
+
Dependencies dependencies = 3;
|
| 22 |
+
message Dependencies {
|
| 23 |
+
string parent_task_id = 1;
|
| 24 |
+
repeated string sibling_dependencies = 2;
|
| 25 |
+
}
|
| 26 |
+
|
| 27 |
+
TaskStatus status = 4;
|
| 28 |
+
repeated Message messages = 5;
|
| 29 |
+
string summary = 6;
|
| 30 |
+
}
|
| 31 |
+
|
| 32 |
+
message TaskStatus {
|
| 33 |
+
oneof status {
|
| 34 |
+
Pending pending = 1;
|
| 35 |
+
InProgress in_progress = 2;
|
| 36 |
+
Blocked blocked = 3;
|
| 37 |
+
Succeeded succeeded = 4;
|
| 38 |
+
Failed failed = 5;
|
| 39 |
+
Aborted aborted = 6;
|
| 40 |
+
}
|
| 41 |
+
|
| 42 |
+
message Pending {
|
| 43 |
+
|
| 44 |
+
}
|
| 45 |
+
|
| 46 |
+
message InProgress {
|
| 47 |
+
|
| 48 |
+
}
|
| 49 |
+
|
| 50 |
+
message Blocked {
|
| 51 |
+
|
| 52 |
+
}
|
| 53 |
+
|
| 54 |
+
message Succeeded {
|
| 55 |
+
|
| 56 |
+
}
|
| 57 |
+
|
| 58 |
+
message Failed {
|
| 59 |
+
|
| 60 |
+
}
|
| 61 |
+
|
| 62 |
+
message Aborted {
|
| 63 |
+
|
| 64 |
+
}
|
| 65 |
+
}
|
| 66 |
+
|
| 67 |
+
message Message {
|
| 68 |
+
string id = 1;
|
| 69 |
+
string task_id = 11;
|
| 70 |
+
string server_message_data = 7;
|
| 71 |
+
repeated Citation citations = 8;
|
| 72 |
+
|
| 73 |
+
oneof message {
|
| 74 |
+
UserQuery user_query = 2;
|
| 75 |
+
AgentOutput agent_output = 3;
|
| 76 |
+
ToolCall tool_call = 4;
|
| 77 |
+
ToolCallResult tool_call_result = 5;
|
| 78 |
+
ServerEvent server_event = 6;
|
| 79 |
+
SystemQuery system_query = 9;
|
| 80 |
+
UpdateTodos update_todos = 10;
|
| 81 |
+
}
|
| 82 |
+
|
| 83 |
+
message UserQuery {
|
| 84 |
+
string query = 1;
|
| 85 |
+
InputContext context = 2;
|
| 86 |
+
map<string, Attachment> referenced_attachments = 3;
|
| 87 |
+
}
|
| 88 |
+
|
| 89 |
+
message SystemQuery {
|
| 90 |
+
InputContext context = 2;
|
| 91 |
+
|
| 92 |
+
oneof type {
|
| 93 |
+
AutoCodeDiff auto_code_diff = 1;
|
| 94 |
+
ResumeConversation resume_conversation = 3;
|
| 95 |
+
}
|
| 96 |
+
}
|
| 97 |
+
|
| 98 |
+
message AutoCodeDiff {
|
| 99 |
+
string query = 1;
|
| 100 |
+
}
|
| 101 |
+
|
| 102 |
+
message ResumeConversation {
|
| 103 |
+
|
| 104 |
+
}
|
| 105 |
+
|
| 106 |
+
message AgentOutput {
|
| 107 |
+
string text = 1;
|
| 108 |
+
string reasoning = 2;
|
| 109 |
+
}
|
| 110 |
+
|
| 111 |
+
message ToolCall {
|
| 112 |
+
string tool_call_id = 1;
|
| 113 |
+
|
| 114 |
+
oneof tool {
|
| 115 |
+
RunShellCommand run_shell_command = 2;
|
| 116 |
+
SearchCodebase search_codebase = 3;
|
| 117 |
+
Server server = 4;
|
| 118 |
+
ReadFiles read_files = 5;
|
| 119 |
+
ApplyFileDiffs apply_file_diffs = 6;
|
| 120 |
+
SuggestPlan suggest_plan = 7;
|
| 121 |
+
SuggestCreatePlan suggest_create_plan = 8;
|
| 122 |
+
Grep grep = 9;
|
| 123 |
+
FileGlob file_glob = 10 [deprecated = true];
|
| 124 |
+
ReadMCPResource read_mcp_resource = 11;
|
| 125 |
+
CallMCPTool call_mcp_tool = 12;
|
| 126 |
+
WriteToLongRunningShellCommand write_to_long_running_shell_command = 13;
|
| 127 |
+
SuggestNewConversation suggest_new_conversation = 14;
|
| 128 |
+
FileGlobV2 file_glob_v2 = 15;
|
| 129 |
+
}
|
| 130 |
+
|
| 131 |
+
message Server {
|
| 132 |
+
string payload = 1;
|
| 133 |
+
}
|
| 134 |
+
|
| 135 |
+
message RunShellCommand {
|
| 136 |
+
string command = 1;
|
| 137 |
+
bool is_read_only = 2;
|
| 138 |
+
bool uses_pager = 3;
|
| 139 |
+
repeated Citation citations = 4;
|
| 140 |
+
bool is_risky = 5;
|
| 141 |
+
}
|
| 142 |
+
|
| 143 |
+
message WriteToLongRunningShellCommand {
|
| 144 |
+
bytes input = 1;
|
| 145 |
+
}
|
| 146 |
+
|
| 147 |
+
message SuggestNewConversation {
|
| 148 |
+
string message_id = 1;
|
| 149 |
+
}
|
| 150 |
+
|
| 151 |
+
message ReadFiles {
|
| 152 |
+
repeated File files = 1;
|
| 153 |
+
message File {
|
| 154 |
+
string name = 1;
|
| 155 |
+
repeated FileContentLineRange line_ranges = 2;
|
| 156 |
+
}
|
| 157 |
+
}
|
| 158 |
+
|
| 159 |
+
message SearchCodebase {
|
| 160 |
+
string query = 1;
|
| 161 |
+
repeated string path_filters = 2;
|
| 162 |
+
string codebase_path = 3;
|
| 163 |
+
}
|
| 164 |
+
|
| 165 |
+
message ApplyFileDiffs {
|
| 166 |
+
string summary = 1;
|
| 167 |
+
|
| 168 |
+
repeated FileDiff diffs = 2;
|
| 169 |
+
message FileDiff {
|
| 170 |
+
string file_path = 1;
|
| 171 |
+
string search = 2;
|
| 172 |
+
string replace = 3;
|
| 173 |
+
}
|
| 174 |
+
|
| 175 |
+
repeated NewFile new_files = 3;
|
| 176 |
+
message NewFile {
|
| 177 |
+
string file_path = 1;
|
| 178 |
+
string content = 2;
|
| 179 |
+
}
|
| 180 |
+
}
|
| 181 |
+
|
| 182 |
+
message SuggestPlan {
|
| 183 |
+
string summary = 1;
|
| 184 |
+
repeated Task proposed_tasks = 2;
|
| 185 |
+
}
|
| 186 |
+
|
| 187 |
+
message SuggestCreatePlan {
|
| 188 |
+
|
| 189 |
+
}
|
| 190 |
+
|
| 191 |
+
message Grep {
|
| 192 |
+
repeated string queries = 1;
|
| 193 |
+
string path = 2;
|
| 194 |
+
}
|
| 195 |
+
|
| 196 |
+
message FileGlob {
|
| 197 |
+
repeated string patterns = 1;
|
| 198 |
+
string path = 2;
|
| 199 |
+
}
|
| 200 |
+
|
| 201 |
+
message FileGlobV2 {
|
| 202 |
+
repeated string patterns = 1;
|
| 203 |
+
string search_dir = 2;
|
| 204 |
+
int32 max_matches = 3;
|
| 205 |
+
int32 max_depth = 4;
|
| 206 |
+
int32 min_depth = 5;
|
| 207 |
+
}
|
| 208 |
+
|
| 209 |
+
message ReadMCPResource {
|
| 210 |
+
string uri = 1;
|
| 211 |
+
}
|
| 212 |
+
|
| 213 |
+
message CallMCPTool {
|
| 214 |
+
string name = 1;
|
| 215 |
+
google.protobuf.Struct args = 2;
|
| 216 |
+
}
|
| 217 |
+
}
|
| 218 |
+
|
| 219 |
+
message ToolCallResult {
|
| 220 |
+
string tool_call_id = 1;
|
| 221 |
+
InputContext context = 11;
|
| 222 |
+
|
| 223 |
+
oneof result {
|
| 224 |
+
RunShellCommandResult run_shell_command = 2;
|
| 225 |
+
SearchCodebaseResult search_codebase = 3;
|
| 226 |
+
ServerResult server = 4;
|
| 227 |
+
ReadFilesResult read_files = 5;
|
| 228 |
+
ApplyFileDiffsResult apply_file_diffs = 6;
|
| 229 |
+
SuggestPlanResult suggest_plan = 7;
|
| 230 |
+
SuggestCreatePlanResult suggest_create_plan = 8;
|
| 231 |
+
GrepResult grep = 9;
|
| 232 |
+
FileGlobResult file_glob = 10 [deprecated = true];
|
| 233 |
+
RefineResult refine = 13;
|
| 234 |
+
google.protobuf.Empty cancel = 14;
|
| 235 |
+
ReadMCPResourceResult read_mcp_resource = 15;
|
| 236 |
+
CallMCPToolResult call_mcp_tool = 16;
|
| 237 |
+
WriteToLongRunningShellCommandResult write_to_long_running_shell_command = 17;
|
| 238 |
+
SuggestNewConversationResult suggest_new_conversation = 18;
|
| 239 |
+
FileGlobV2Result file_glob_v2 = 19;
|
| 240 |
+
}
|
| 241 |
+
|
| 242 |
+
message ServerResult {
|
| 243 |
+
string serialized_result = 1;
|
| 244 |
+
}
|
| 245 |
+
|
| 246 |
+
message RefineResult {
|
| 247 |
+
UserQuery user_query = 1;
|
| 248 |
+
}
|
| 249 |
+
}
|
| 250 |
+
|
| 251 |
+
message ServerEvent {
|
| 252 |
+
string payload = 1;
|
| 253 |
+
}
|
| 254 |
+
|
| 255 |
+
message UpdateTodos {
|
| 256 |
+
oneof operation {
|
| 257 |
+
CreateTodoList create_todo_list = 1;
|
| 258 |
+
UpdatePendingTodos update_pending_todos = 2;
|
| 259 |
+
MarkTodosCompleted mark_todos_completed = 3;
|
| 260 |
+
}
|
| 261 |
+
}
|
| 262 |
+
}
|
| 263 |
+
|
| 264 |
+
message RunShellCommandResult {
|
| 265 |
+
string command = 3;
|
| 266 |
+
string output = 1 [deprecated = true];
|
| 267 |
+
int32 exit_code = 2 [deprecated = true];
|
| 268 |
+
|
| 269 |
+
oneof result {
|
| 270 |
+
LongRunningShellCommandSnapshot long_running_command_snapshot = 4;
|
| 271 |
+
ShellCommandFinished command_finished = 5;
|
| 272 |
+
}
|
| 273 |
+
}
|
| 274 |
+
|
| 275 |
+
message ReadFilesResult {
|
| 276 |
+
oneof result {
|
| 277 |
+
Success success = 1;
|
| 278 |
+
Error error = 2;
|
| 279 |
+
}
|
| 280 |
+
|
| 281 |
+
message Success {
|
| 282 |
+
repeated FileContent files = 1;
|
| 283 |
+
}
|
| 284 |
+
|
| 285 |
+
message Error {
|
| 286 |
+
string message = 1;
|
| 287 |
+
}
|
| 288 |
+
}
|
| 289 |
+
|
| 290 |
+
message SearchCodebaseResult {
|
| 291 |
+
oneof result {
|
| 292 |
+
Success success = 1;
|
| 293 |
+
Error error = 2;
|
| 294 |
+
}
|
| 295 |
+
|
| 296 |
+
message Success {
|
| 297 |
+
repeated FileContent files = 1;
|
| 298 |
+
}
|
| 299 |
+
|
| 300 |
+
message Error {
|
| 301 |
+
string message = 1;
|
| 302 |
+
}
|
| 303 |
+
}
|
| 304 |
+
|
| 305 |
+
message ApplyFileDiffsResult {
|
| 306 |
+
oneof result {
|
| 307 |
+
Success success = 1;
|
| 308 |
+
Error error = 2;
|
| 309 |
+
}
|
| 310 |
+
|
| 311 |
+
message Success {
|
| 312 |
+
repeated FileContent updated_files = 1 [deprecated = true];
|
| 313 |
+
|
| 314 |
+
repeated UpdatedFileContent updated_files_v2 = 2;
|
| 315 |
+
message UpdatedFileContent {
|
| 316 |
+
FileContent file = 1;
|
| 317 |
+
bool was_edited_by_user = 2;
|
| 318 |
+
}
|
| 319 |
+
}
|
| 320 |
+
|
| 321 |
+
message Error {
|
| 322 |
+
string message = 1;
|
| 323 |
+
}
|
| 324 |
+
}
|
| 325 |
+
|
| 326 |
+
message SuggestCreatePlanResult {
|
| 327 |
+
bool accepted = 1;
|
| 328 |
+
}
|
| 329 |
+
|
| 330 |
+
message SuggestPlanResult {
|
| 331 |
+
oneof result {
|
| 332 |
+
google.protobuf.Empty accepted = 1;
|
| 333 |
+
UserEditedPlan user_edited_plan = 2;
|
| 334 |
+
}
|
| 335 |
+
|
| 336 |
+
message UserEditedPlan {
|
| 337 |
+
string plan_text = 1;
|
| 338 |
+
}
|
| 339 |
+
}
|
| 340 |
+
|
| 341 |
+
message GrepResult {
|
| 342 |
+
oneof result {
|
| 343 |
+
Success success = 1;
|
| 344 |
+
Error error = 2;
|
| 345 |
+
}
|
| 346 |
+
|
| 347 |
+
message Success {
|
| 348 |
+
repeated GrepFileMatch matched_files = 1;
|
| 349 |
+
message GrepFileMatch {
|
| 350 |
+
string file_path = 1;
|
| 351 |
+
|
| 352 |
+
repeated GrepLineMatch matched_lines = 2;
|
| 353 |
+
message GrepLineMatch {
|
| 354 |
+
uint32 line_number = 1;
|
| 355 |
+
}
|
| 356 |
+
}
|
| 357 |
+
}
|
| 358 |
+
|
| 359 |
+
message Error {
|
| 360 |
+
string message = 1;
|
| 361 |
+
}
|
| 362 |
+
}
|
| 363 |
+
|
| 364 |
+
message FileGlobResult {
|
| 365 |
+
oneof result {
|
| 366 |
+
Success success = 1;
|
| 367 |
+
Error error = 2;
|
| 368 |
+
}
|
| 369 |
+
|
| 370 |
+
message Success {
|
| 371 |
+
string matched_files = 1;
|
| 372 |
+
}
|
| 373 |
+
|
| 374 |
+
message Error {
|
| 375 |
+
string message = 1;
|
| 376 |
+
}
|
| 377 |
+
}
|
| 378 |
+
|
| 379 |
+
message FileGlobV2Result {
|
| 380 |
+
oneof result {
|
| 381 |
+
Success success = 1;
|
| 382 |
+
Error error = 2;
|
| 383 |
+
}
|
| 384 |
+
|
| 385 |
+
message Success {
|
| 386 |
+
repeated FileGlobMatch matched_files = 1;
|
| 387 |
+
message FileGlobMatch {
|
| 388 |
+
string file_path = 1;
|
| 389 |
+
}
|
| 390 |
+
}
|
| 391 |
+
|
| 392 |
+
message Error {
|
| 393 |
+
string message = 1;
|
| 394 |
+
}
|
| 395 |
+
}
|
| 396 |
+
|
| 397 |
+
message MCPResourceContent {
|
| 398 |
+
string uri = 1;
|
| 399 |
+
|
| 400 |
+
oneof content_type {
|
| 401 |
+
Text text = 2;
|
| 402 |
+
Binary binary = 3;
|
| 403 |
+
}
|
| 404 |
+
|
| 405 |
+
message Text {
|
| 406 |
+
string content = 1;
|
| 407 |
+
string mime_type = 2;
|
| 408 |
+
}
|
| 409 |
+
|
| 410 |
+
message Binary {
|
| 411 |
+
bytes data = 1;
|
| 412 |
+
string mime_type = 2;
|
| 413 |
+
}
|
| 414 |
+
}
|
| 415 |
+
|
| 416 |
+
message ReadMCPResourceResult {
|
| 417 |
+
oneof result {
|
| 418 |
+
Success success = 1;
|
| 419 |
+
Error error = 2;
|
| 420 |
+
}
|
| 421 |
+
|
| 422 |
+
message Success {
|
| 423 |
+
repeated MCPResourceContent contents = 1;
|
| 424 |
+
}
|
| 425 |
+
|
| 426 |
+
message Error {
|
| 427 |
+
string message = 1;
|
| 428 |
+
}
|
| 429 |
+
}
|
| 430 |
+
|
| 431 |
+
message WriteToLongRunningShellCommandResult {
|
| 432 |
+
oneof result {
|
| 433 |
+
LongRunningShellCommandSnapshot long_running_command_snapshot = 1;
|
| 434 |
+
ShellCommandFinished command_finished = 2;
|
| 435 |
+
}
|
| 436 |
+
}
|
| 437 |
+
|
| 438 |
+
message SuggestNewConversationResult {
|
| 439 |
+
oneof result {
|
| 440 |
+
Accepted accepted = 1;
|
| 441 |
+
Rejected rejected = 2;
|
| 442 |
+
}
|
| 443 |
+
|
| 444 |
+
message Accepted {
|
| 445 |
+
string message_id = 1;
|
| 446 |
+
}
|
| 447 |
+
|
| 448 |
+
message Rejected {
|
| 449 |
+
|
| 450 |
+
}
|
| 451 |
+
}
|
| 452 |
+
|
| 453 |
+
message ShellCommandFinished {
|
| 454 |
+
string output = 1;
|
| 455 |
+
int32 exit_code = 2;
|
| 456 |
+
}
|
| 457 |
+
|
| 458 |
+
message CallMCPToolResult {
|
| 459 |
+
oneof result {
|
| 460 |
+
Success success = 1;
|
| 461 |
+
Error error = 2;
|
| 462 |
+
}
|
| 463 |
+
|
| 464 |
+
message Success {
|
| 465 |
+
repeated Result results = 1;
|
| 466 |
+
message Result {
|
| 467 |
+
oneof result {
|
| 468 |
+
Text text = 1;
|
| 469 |
+
Image image = 2;
|
| 470 |
+
MCPResourceContent resource = 3;
|
| 471 |
+
}
|
| 472 |
+
|
| 473 |
+
message Text {
|
| 474 |
+
string text = 1;
|
| 475 |
+
}
|
| 476 |
+
|
| 477 |
+
message Image {
|
| 478 |
+
bytes data = 1;
|
| 479 |
+
string mime_type = 2;
|
| 480 |
+
}
|
| 481 |
+
}
|
| 482 |
+
}
|
| 483 |
+
|
| 484 |
+
message Error {
|
| 485 |
+
string message = 1;
|
| 486 |
+
}
|
| 487 |
+
}
|
| 488 |
+
|
| 489 |
+
enum ToolType {
|
| 490 |
+
RUN_SHELL_COMMAND = 0;
|
| 491 |
+
SEARCH_CODEBASE = 1;
|
| 492 |
+
READ_FILES = 2;
|
| 493 |
+
APPLY_FILE_DIFFS = 3;
|
| 494 |
+
SUGGEST_PLAN = 4;
|
| 495 |
+
SUGGEST_CREATE_PLAN = 5;
|
| 496 |
+
GREP = 6;
|
| 497 |
+
FILE_GLOB = 7;
|
| 498 |
+
READ_MCP_RESOURCE = 8;
|
| 499 |
+
CALL_MCP_TOOL = 9;
|
| 500 |
+
WRITE_TO_LONG_RUNNING_SHELL_COMMAND = 10;
|
| 501 |
+
SUGGEST_NEW_CONVERSATION = 11;
|
| 502 |
+
FILE_GLOB_V2 = 12;
|
| 503 |
+
}
|
proto/todo.proto
ADDED
|
@@ -0,0 +1,23 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
syntax = "proto3";

package warp.multi_agent.v1;

option go_package = "github.com/warp/warp-proto-apis/multi_agent/v1";

// A single todo entry tracked by the agent's todo list.
message TodoItem {
  string id = 1;
  string title = 2;
  string description = 3;
}

// Creates a new todo list seeded with the given items.
message CreateTodoList {
  repeated TodoItem initial_todos = 1;
}

// Replaces the current set of not-yet-completed todos.
message UpdatePendingTodos {
  repeated TodoItem updated_pending_todos = 1;
}

// Marks the todos with the given ids as completed.
message MarkTodosCompleted {
  repeated string todo_ids = 1;
}
|
protobuf2openai/__init__.py
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Package for converting between Warp protobuf JSON and OpenAI Chat Completions API
|
| 2 |
+
|
| 3 |
+
__all__ = []
|
protobuf2openai/app.py
ADDED
|
@@ -0,0 +1,49 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import asyncio
|
| 4 |
+
import json
|
| 5 |
+
|
| 6 |
+
import httpx
|
| 7 |
+
from fastapi import FastAPI
|
| 8 |
+
|
| 9 |
+
from .logging import logger
|
| 10 |
+
|
| 11 |
+
from .config import BRIDGE_BASE_URL, WARMUP_INIT_RETRIES, WARMUP_INIT_DELAY_S
|
| 12 |
+
from .bridge import initialize_once
|
| 13 |
+
from .router import router
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
app = FastAPI(title="OpenAI Chat Completions (Warp bridge) - Streaming")
|
| 17 |
+
app.include_router(router)
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
@app.on_event("startup")
async def _on_startup():
    """Log endpoints, wait for the bridge /healthz, then warm up the session.

    Startup never fails hard: health-check and warmup errors are logged and
    the app keeps serving.
    """
    try:
        logger.info("[OpenAI Compat] Server starting. BRIDGE_BASE_URL=%s", BRIDGE_BASE_URL)
        logger.info("[OpenAI Compat] Endpoints: GET /healthz, GET /v1/models, POST /v1/chat/completions")
    except Exception:
        pass

    # Poll the bridge health endpoint until it answers 200 or retries run out.
    url = f"{BRIDGE_BASE_URL}/healthz"
    retries = WARMUP_INIT_RETRIES
    delay_s = WARMUP_INIT_DELAY_S
    for attempt in range(1, retries + 1):
        try:
            async with httpx.AsyncClient(timeout=5.0, trust_env=True) as client:
                resp = await client.get(url)
                if resp.status_code == 200:
                    logger.info("[OpenAI Compat] Bridge server is ready at %s", url)
                    break
                else:
                    logger.warning("[OpenAI Compat] Bridge health at %s -> HTTP %s", url, resp.status_code)
        except Exception as e:
            logger.warning("[OpenAI Compat] Bridge health attempt %s/%s failed: %s", attempt, retries, e)
        await asyncio.sleep(delay_s)
    else:
        # for/else: every attempt failed without a break.
        logger.error("[OpenAI Compat] Bridge server not ready at %s", url)

    # Best-effort warmup; initialize_once is blocking, so run it off the event loop.
    try:
        await asyncio.to_thread(initialize_once)
    except Exception as e:
        logger.warning(f"[OpenAI Compat] Warmup initialize_once on startup failed: {e}")
|
protobuf2openai/bridge.py
ADDED
|
@@ -0,0 +1,105 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import json
|
| 4 |
+
import time
|
| 5 |
+
import uuid
|
| 6 |
+
from typing import Any, Dict, Optional
|
| 7 |
+
|
| 8 |
+
import requests
|
| 9 |
+
from .logging import logger
|
| 10 |
+
|
| 11 |
+
from .config import (
|
| 12 |
+
BRIDGE_BASE_URL,
|
| 13 |
+
FALLBACK_BRIDGE_URLS,
|
| 14 |
+
WARMUP_INIT_RETRIES,
|
| 15 |
+
WARMUP_INIT_DELAY_S,
|
| 16 |
+
WARMUP_REQUEST_RETRIES,
|
| 17 |
+
WARMUP_REQUEST_DELAY_S,
|
| 18 |
+
)
|
| 19 |
+
from .packets import packet_template
|
| 20 |
+
from .state import STATE, ensure_tool_ids
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
def bridge_send_stream(packet: Dict[str, Any]) -> Dict[str, Any]:
    """POST *packet* to the first reachable bridge URL and return its JSON reply.

    Each base in FALLBACK_BRIDGE_URLS is tried in order; the last failure
    (HTTP error or exception) is re-raised when every candidate fails.
    """
    failure: Optional[Exception] = None
    envelope = {"json_data": packet, "message_type": "warp.multi_agent.v1.Request"}
    for base in FALLBACK_BRIDGE_URLS:
        endpoint = f"{base}/api/warp/send_stream"
        try:
            # Request logging is best-effort; serialization problems must not
            # prevent the actual send.
            try:
                logger.info("[OpenAI Compat] Bridge request URL: %s", endpoint)
                logger.info("[OpenAI Compat] Bridge request payload: %s", json.dumps(envelope, ensure_ascii=False))
            except Exception:
                logger.info("[OpenAI Compat] Bridge request payload serialization failed for URL %s", endpoint)
            response = requests.post(endpoint, json=envelope, timeout=(5.0, 180.0))
            if response.status_code != 200:
                failure = Exception(f"bridge_error: HTTP {response.status_code} {response.text}")
                continue
            try:
                logger.info("[OpenAI Compat] Bridge response (raw text): %s", response.text)
            except Exception:
                pass
            return response.json()
        except Exception as exc:
            failure = exc
    if failure:
        raise failure
    raise Exception("bridge_unreachable")
|
| 50 |
+
|
| 51 |
+
|
| 52 |
+
def initialize_once() -> None:
    """Warm up the Warp bridge and cache conversation/task ids in STATE.

    Idempotent: returns immediately once STATE.conversation_id is set.
    Waits for any bridge /healthz to answer 200, then sends a one-shot
    "warmup" query and records the ids the bridge returns.

    Raises:
        RuntimeError: if no bridge health endpoint becomes ready in time.
        Exception: the last bridge error if every warmup attempt fails.
    """
    if STATE.conversation_id:
        return

    ensure_tool_ids()

    # Reuse a prior baseline task id if present; otherwise mint a fresh one.
    first_task_id = STATE.baseline_task_id or str(uuid.uuid4())
    STATE.baseline_task_id = first_task_id

    # Poll every candidate bridge's /healthz until one responds 200.
    health_urls = [f"{base}/healthz" for base in FALLBACK_BRIDGE_URLS]
    last_err: Optional[str] = None
    for _ in range(WARMUP_INIT_RETRIES):
        try:
            ok = False
            last_err = None
            for h in health_urls:
                try:
                    resp = requests.get(h, timeout=5.0)
                    if resp.status_code == 200:
                        ok = True
                        break
                    else:
                        last_err = f"HTTP {resp.status_code} at {h}"
                except Exception as he:
                    last_err = f"{type(he).__name__}: {he} at {h}"
            if ok:
                break
        except Exception as e:
            last_err = str(e)
        time.sleep(WARMUP_INIT_DELAY_S)
    else:
        # for/else: retries exhausted without any 200 response.
        raise RuntimeError(f"Bridge server not ready: {last_err}")

    # Minimal warmup request: a single "warmup" user query on the baseline task.
    pkt = packet_template()
    pkt["task_context"]["active_task_id"] = first_task_id
    pkt["input"]["user_inputs"]["inputs"].append({"user_query": {"query": "warmup"}})

    last_exc: Optional[Exception] = None
    for attempt in range(1, WARMUP_REQUEST_RETRIES + 1):
        try:
            resp = bridge_send_stream(pkt)
            break
        except Exception as e:
            last_exc = e
            logger.warning(f"[OpenAI Compat] Warmup attempt {attempt}/{WARMUP_REQUEST_RETRIES} failed: {e}")
            if attempt < WARMUP_REQUEST_RETRIES:
                time.sleep(WARMUP_REQUEST_DELAY_S)
            else:
                raise

    # Cache the ids returned by the bridge for subsequent requests.
    STATE.conversation_id = resp.get("conversation_id") or STATE.conversation_id
    ret_task_id = resp.get("task_id")
    if isinstance(ret_task_id, str) and ret_task_id:
        STATE.baseline_task_id = ret_task_id
|
protobuf2openai/config.py
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations

import os

# Base URL of the Warp protobuf bridge server; override via WARP_BRIDGE_URL.
BRIDGE_BASE_URL = os.getenv("WARP_BRIDGE_URL", "http://127.0.0.1:8000")
# Candidate bridge base URLs, tried in order until one responds.
# NOTE(review): when WARP_BRIDGE_URL is unset, both entries are identical —
# confirm the duplicate is intentional.
FALLBACK_BRIDGE_URLS = [
    BRIDGE_BASE_URL,
    "http://127.0.0.1:8000",
]

# Retry counts and delays (seconds) used while waiting for the bridge to come
# up, and while sending the one-shot warmup request.
WARMUP_INIT_RETRIES = int(os.getenv("WARP_COMPAT_INIT_RETRIES", "10"))
WARMUP_INIT_DELAY_S = float(os.getenv("WARP_COMPAT_INIT_DELAY", "0.5"))
WARMUP_REQUEST_RETRIES = int(os.getenv("WARP_COMPAT_WARMUP_RETRIES", "3"))
WARMUP_REQUEST_DELAY_S = float(os.getenv("WARP_COMPAT_WARMUP_DELAY", "1.5"))
|
protobuf2openai/helpers.py
ADDED
|
@@ -0,0 +1,54 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
from typing import Any, Dict, List
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
def _get(d: Dict[str, Any], *names: str) -> Any:
|
| 7 |
+
for n in names:
|
| 8 |
+
if isinstance(d, dict) and n in d:
|
| 9 |
+
return d[n]
|
| 10 |
+
return None
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
def normalize_content_to_list(content: Any) -> List[Dict[str, Any]]:
    """Coerce an OpenAI-style ``content`` value into a list of segment dicts.

    Accepts a plain string, a list of segment dicts, or a single dict with a
    string ``text`` field. Unrecognized shapes yield an empty list; any
    unexpected error is swallowed (best-effort normalization).
    """
    try:
        if isinstance(content, str):
            return [{"type": "text", "text": content}]

        if isinstance(content, dict):
            if isinstance(content.get("text"), str):
                return [{"type": "text", "text": content.get("text")}]
            return []

        if not isinstance(content, list):
            return []

        normalized: List[Dict[str, Any]] = []
        for entry in content:
            if not isinstance(entry, dict):
                # Non-dict items are dropped.
                continue
            text_val = entry.get("text")
            seg_type = entry.get("type") or ("text" if isinstance(text_val, str) else None)
            if seg_type == "text" and isinstance(text_val, str):
                normalized.append({"type": "text", "text": text_val})
                continue
            # Non-text segment: keep whatever type/text fields are present.
            partial: Dict[str, Any] = {}
            if seg_type:
                partial["type"] = seg_type
            if isinstance(text_val, str):
                partial["text"] = text_val
            if partial:
                normalized.append(partial)
        return normalized
    except Exception:
        return []
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
def segments_to_text(segments: List[Dict[str, Any]]) -> str:
    """Concatenate the text of every well-formed ``text`` segment."""
    return "".join(
        seg["text"] or ""
        for seg in segments
        if isinstance(seg, dict)
        and seg.get("type") == "text"
        and isinstance(seg.get("text"), str)
    )
|
| 47 |
+
|
| 48 |
+
|
| 49 |
+
def segments_to_warp_results(segments: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
    """Wrap each well-formed text segment in the Warp MCP tool-result shape."""
    return [
        {"text": {"text": seg["text"]}}
        for seg in segments
        if isinstance(seg, dict)
        and seg.get("type") == "text"
        and isinstance(seg.get("text"), str)
    ]
|
protobuf2openai/logging.py
ADDED
|
@@ -0,0 +1,32 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Local logging for protobuf2openai package to avoid cross-package dependencies.
"""
import logging
from logging.handlers import RotatingFileHandler
from pathlib import Path

# All log output goes under ./logs (created on import if absent).
LOG_DIR = Path("logs")
LOG_DIR.mkdir(exist_ok=True)

_logger = logging.getLogger("protobuf2openai")
_logger.setLevel(logging.INFO)

# Remove existing handlers to prevent duplication
# (e.g. when this module is re-imported under an auto-reloader).
for h in _logger.handlers[:]:
    _logger.removeHandler(h)

# Rotating file log: 5 MB per file, 3 backups, UTF-8 encoded.
file_handler = RotatingFileHandler(LOG_DIR / "openai_compat.log", maxBytes=5*1024*1024, backupCount=3, encoding="utf-8")
file_handler.setLevel(logging.INFO)
console_handler = logging.StreamHandler()
console_handler.setLevel(logging.INFO)

# Single shared format with function name and line number for traceability.
fmt = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(funcName)s:%(lineno)d - %(message)s')
file_handler.setFormatter(fmt)
console_handler.setFormatter(fmt)

_logger.addHandler(file_handler)
_logger.addHandler(console_handler)

# Public module-level logger used throughout protobuf2openai.
logger = _logger
|
protobuf2openai/models.py
ADDED
|
@@ -0,0 +1,31 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
from typing import Any, Dict, List, Optional, Union
|
| 4 |
+
from pydantic import BaseModel, Field
|
| 5 |
+
|
| 6 |
+
|
| 7 |
+
class ChatMessage(BaseModel):
    """One OpenAI-style chat message (system/user/assistant/tool)."""
    # Message author role, e.g. "system", "user", "assistant", or "tool".
    role: str
    # Plain string or a list of content segments (OpenAI multi-part form).
    content: Optional[Union[str, List[Dict[str, Any]]]] = ""
    # For role == "tool": id of the tool call this message answers.
    tool_call_id: Optional[str] = None
    # For role == "assistant": tool calls emitted by the model.
    tool_calls: Optional[List[Dict[str, Any]]] = None
    # Optional participant name (OpenAI-compatible field).
    name: Optional[str] = None
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
class OpenAIFunctionDef(BaseModel):
    """Function declaration inside an OpenAI tool definition."""
    name: str
    description: Optional[str] = None
    # JSON Schema describing the function's parameters.
    parameters: Optional[Dict[str, Any]] = None
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
class OpenAITool(BaseModel):
    """OpenAI tool wrapper; only function tools are supported."""
    type: str = Field("function", description="Only 'function' is supported")
    function: OpenAIFunctionDef
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
class ChatCompletionsRequest(BaseModel):
    """Request body for POST /v1/chat/completions (OpenAI-compatible subset)."""
    model: Optional[str] = None
    messages: List[ChatMessage]
    # When true, respond with an SSE stream of chunks instead of one JSON body.
    stream: Optional[bool] = False
    tools: Optional[List[OpenAITool]] = None
    # OpenAI tool_choice value; accepted as-is, interpretation is downstream.
    tool_choice: Optional[Any] = None
|
protobuf2openai/packets.py
ADDED
|
@@ -0,0 +1,137 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import uuid
|
| 4 |
+
from typing import Any, Dict, List, Optional
|
| 5 |
+
import json
|
| 6 |
+
|
| 7 |
+
from .state import STATE, ensure_tool_ids
|
| 8 |
+
from .helpers import normalize_content_to_list, segments_to_text, segments_to_warp_results
|
| 9 |
+
from .models import ChatMessage
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
def packet_template() -> Dict[str, Any]:
    """Build a fresh Warp request packet skeleton.

    Callers fill in ``task_context.active_task_id`` and append entries to
    ``input.user_inputs.inputs`` before sending.
    """
    model_config = {
        "base": "claude-4.1-opus",
        "planning": "gpt-5 (high reasoning)",
        "coding": "auto",
    }
    settings: Dict[str, Any] = {
        "model_config": model_config,
        "rules_enabled": False,
        "web_context_retrieval_enabled": False,
        "supports_parallel_tool_calls": False,
        "planning_enabled": False,
        "warp_drive_context_enabled": False,
        "supports_create_files": False,
        "use_anthropic_text_editor_tools": False,
        "supports_long_running_commands": False,
        "should_preserve_file_content_in_history": False,
        "supports_todos_ui": False,
        "supports_linked_code_blocks": False,
        # 9 corresponds to CALL_MCP_TOOL in the Warp ToolType enum.
        "supported_tools": [9],
    }
    return {
        "task_context": {"active_task_id": ""},
        "input": {"context": {}, "user_inputs": {"inputs": []}},
        "settings": settings,
        "metadata": {"logging": {"is_autodetected_user_query": True, "entrypoint": "USER_INITIATED"}},
    }
|
| 37 |
+
|
| 38 |
+
|
| 39 |
+
def map_history_to_warp_messages(history: List[ChatMessage], task_id: str, system_prompt_for_last_user: Optional[str] = None, attach_to_history_last_user: bool = False) -> List[Dict[str, Any]]:
    """Convert OpenAI-style chat history into Warp task_context messages.

    The final "input" message (last user message or last tool result) is
    skipped here — it is attached separately to input.user_inputs by
    attach_user_and_tools_to_inputs().

    Args:
        history: post-reorder OpenAI chat messages.
        task_id: Warp task id stamped on every emitted message.
        system_prompt_for_last_user: not referenced in this body — TODO confirm
            whether it is dead or consumed by a caller convention.
        attach_to_history_last_user: not referenced in this body — TODO confirm.

    Returns:
        List of Warp message dicts for packet["task_context"].
    """
    ensure_tool_ids()
    msgs: List[Dict[str, Any]] = []
    # Insert server tool_call preamble as first message.
    # "IgIQAQ==" is an opaque base64 payload — presumably a server handshake
    # marker expected by Warp; verify against the protobuf schema.
    msgs.append({
        "id": (STATE.tool_message_id or str(uuid.uuid4())),
        "task_id": task_id,
        "tool_call": {
            "tool_call_id": (STATE.tool_call_id or str(uuid.uuid4())),
            "server": {"payload": "IgIQAQ=="},
        },
    })

    # Determine the last input message index (either last 'user' or last 'tool' with tool_call_id)
    last_input_index: Optional[int] = None
    for idx in range(len(history) - 1, -1, -1):
        _m = history[idx]
        if _m.role == "user":
            last_input_index = idx
            break
        if _m.role == "tool" and _m.tool_call_id:
            last_input_index = idx
            break

    for i, m in enumerate(history):
        mid = str(uuid.uuid4())
        # Skip the final input message; it will be placed into input.user_inputs
        if (last_input_index is not None) and (i == last_input_index):
            continue
        if m.role == "user":
            user_query_obj: Dict[str, Any] = {"query": segments_to_text(normalize_content_to_list(m.content))}
            msgs.append({"id": mid, "task_id": task_id, "user_query": user_query_obj})
        elif m.role == "assistant":
            # Plain assistant text becomes agent_output; each tool call becomes
            # a separate call_mcp_tool message.
            _assistant_text = segments_to_text(normalize_content_to_list(m.content))
            if _assistant_text:
                msgs.append({"id": mid, "task_id": task_id, "agent_output": {"text": _assistant_text}})
            for tc in (m.tool_calls or []):
                msgs.append({
                    "id": str(uuid.uuid4()),
                    "task_id": task_id,
                    "tool_call": {
                        "tool_call_id": tc.get("id") or str(uuid.uuid4()),
                        "call_mcp_tool": {
                            "name": (tc.get("function", {}) or {}).get("name", ""),
                            # OpenAI sends arguments as a JSON string; Warp wants a dict.
                            "args": (json.loads((tc.get("function", {}) or {}).get("arguments", "{}")) if isinstance((tc.get("function", {}) or {}).get("arguments"), str) else (tc.get("function", {}) or {}).get("arguments", {})) or {},
                        },
                    },
                })
        elif m.role == "tool":
            # Preserve tool_result adjacency by placing it directly in task_context
            if m.tool_call_id:
                msgs.append({
                    "id": str(uuid.uuid4()),
                    "task_id": task_id,
                    "tool_call_result": {
                        "tool_call_id": m.tool_call_id,
                        "call_mcp_tool": {
                            "success": {
                                "results": segments_to_warp_results(normalize_content_to_list(m.content))
                            }
                        },
                    },
                })
    return msgs
|
| 103 |
+
|
| 104 |
+
|
| 105 |
+
def attach_user_and_tools_to_inputs(packet: Dict[str, Any], history: List[ChatMessage], system_prompt_text: Optional[str]) -> None:
    """Attach the final history message to ``packet["input"]["user_inputs"]``.

    The last post-reorder message must be a user query or a tool result; it
    becomes the "input" of the Warp request (earlier messages travel in
    task_context). When *system_prompt_text* is given and the input is a user
    query, the prompt is attached as a SYSTEM_PROMPT referenced attachment.

    Raises:
        ValueError: if *history* is empty, or its last message is neither a
            user message nor a tool result (protocol violation).
    """
    # Use the final post-reorder message as input (user or tool result).
    # NOTE: `assert` is stripped under `python -O`, so validate with raises.
    if not history:
        raise ValueError("post-reorder history must contain at least one message")
    last = history[-1]
    if last.role == "user":
        user_query_payload: Dict[str, Any] = {"query": segments_to_text(normalize_content_to_list(last.content))}
        if system_prompt_text:
            # The <ALERT> preamble forbids Warp's built-in tools so only
            # client-declared MCP tools are used.
            user_query_payload["referenced_attachments"] = {
                "SYSTEM_PROMPT": {
                    "plain_text": f"""<ALERT>you are not allowed to call following tools: - `read_files`
- `write_files`
- `run_commands`
- `list_files`
- `str_replace_editor`
- `ask_followup_question`
- `attempt_completion`</ALERT>{system_prompt_text}"""
                }
            }
        packet["input"]["user_inputs"]["inputs"].append({"user_query": user_query_payload})
        return
    if last.role == "tool" and last.tool_call_id:
        packet["input"]["user_inputs"]["inputs"].append({
            "tool_call_result": {
                "tool_call_id": last.tool_call_id,
                "call_mcp_tool": {
                    "success": {"results": segments_to_warp_results(normalize_content_to_list(last.content))}
                },
            }
        })
        return
    # Neither a user query nor a usable tool result: protocol violation.
    raise ValueError("post-reorder history must end with a user message or a tool result")
|
protobuf2openai/reorder.py
ADDED
|
@@ -0,0 +1,96 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
from typing import Dict, List, Optional
|
| 4 |
+
from .models import ChatMessage
|
| 5 |
+
from .helpers import normalize_content_to_list, segments_to_text
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
def reorder_messages_for_anthropic(history: List[ChatMessage]) -> List[ChatMessage]:
    """Reorder OpenAI chat history so each tool result follows its tool call.

    Three passes:
      1. Expand multi-segment user messages and multi-tool-call assistant
         messages into single-purpose messages.
      2. Decide whether the final "input" is a tool result (rather than a
         user message).
      3. Re-emit messages, moving each matched tool result directly after the
         assistant message that issued the call; the assistant message owning
         the final tool result is held back and appended last so its result
         can serve as the request input.

    Message content and ids are preserved; only ordering/grouping changes.
    """
    if not history:
        return []

    # Pass 1: expansion.
    expanded: List[ChatMessage] = []
    for m in history:
        if m.role == "user":
            items = normalize_content_to_list(m.content)
            # Split multi-segment user content into one message per segment.
            if isinstance(m.content, list) and len(items) > 1:
                for seg in items:
                    if isinstance(seg, dict) and seg.get("type") == "text" and isinstance(seg.get("text"), str):
                        expanded.append(ChatMessage(role="user", content=seg.get("text")))
                    else:
                        expanded.append(ChatMessage(role="user", content=[seg] if isinstance(seg, dict) else seg))
            else:
                expanded.append(m)
        elif m.role == "assistant" and m.tool_calls and len(m.tool_calls) > 1:
            # Split an assistant message with N tool calls into text + N single calls.
            _assistant_text = segments_to_text(normalize_content_to_list(m.content))
            if _assistant_text:
                expanded.append(ChatMessage(role="assistant", content=_assistant_text))
            for tc in (m.tool_calls or []):
                expanded.append(ChatMessage(role="assistant", content=None, tool_calls=[tc]))
        else:
            expanded.append(m)

    # Pass 2: scan from the end — a tool result before any user message means
    # the final input is that tool result.
    last_input_tool_id: Optional[str] = None
    last_input_is_tool = False
    for m in reversed(expanded):
        if m.role == "tool" and m.tool_call_id:
            last_input_tool_id = m.tool_call_id
            last_input_is_tool = True
            break
        if m.role == "user":
            break

    # Index the first tool result per id and collect all assistant tool-call ids.
    tool_results_by_id: Dict[str, ChatMessage] = {}
    assistant_tc_ids: set[str] = set()
    for m in expanded:
        if m.role == "tool" and m.tool_call_id and m.tool_call_id not in tool_results_by_id:
            tool_results_by_id[m.tool_call_id] = m
        if m.role == "assistant" and m.tool_calls:
            try:
                for tc in (m.tool_calls or []):
                    _id = (tc or {}).get("id")
                    if isinstance(_id, str) and _id:
                        assistant_tc_ids.add(_id)
            except Exception:
                pass

    # Pass 3: re-emit with tool results placed right after their tool calls.
    result: List[ChatMessage] = []
    trailing_assistant_msg: Optional[ChatMessage] = None
    for m in expanded:
        if m.role == "tool":
            # Preserve unmatched tool results inline
            if not m.tool_call_id or m.tool_call_id not in assistant_tc_ids:
                result.append(m)
                if m.tool_call_id:
                    tool_results_by_id.pop(m.tool_call_id, None)
            continue
        if m.role == "assistant" and m.tool_calls:
            ids: List[str] = []
            try:
                for tc in (m.tool_calls or []):
                    _id = (tc or {}).get("id")
                    if isinstance(_id, str) and _id:
                        ids.append(_id)
            except Exception:
                pass

            # Hold back the assistant message that owns the final tool result;
            # it is re-appended (with its result) at the very end.
            if last_input_is_tool and last_input_tool_id and (last_input_tool_id in ids):
                if trailing_assistant_msg is None:
                    trailing_assistant_msg = m
                continue

            result.append(m)
            for _id in ids:
                tr = tool_results_by_id.pop(_id, None)
                if tr is not None:
                    result.append(tr)
            continue
        result.append(m)

    if last_input_is_tool and last_input_tool_id and trailing_assistant_msg is not None:
        result.append(trailing_assistant_msg)
        tr = tool_results_by_id.pop(last_input_tool_id, None)
        if tr is not None:
            result.append(tr)

    return result
|
protobuf2openai/router.py
ADDED
|
@@ -0,0 +1,219 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import asyncio
|
| 4 |
+
import json
|
| 5 |
+
import time
|
| 6 |
+
import uuid
|
| 7 |
+
from typing import Any, Dict, List, Optional
|
| 8 |
+
|
| 9 |
+
import requests
|
| 10 |
+
from fastapi import APIRouter, HTTPException
|
| 11 |
+
from fastapi.responses import StreamingResponse
|
| 12 |
+
|
| 13 |
+
from .logging import logger
|
| 14 |
+
|
| 15 |
+
from .models import ChatCompletionsRequest, ChatMessage
|
| 16 |
+
from .reorder import reorder_messages_for_anthropic
|
| 17 |
+
from .helpers import normalize_content_to_list, segments_to_text
|
| 18 |
+
from .packets import packet_template, map_history_to_warp_messages, attach_user_and_tools_to_inputs
|
| 19 |
+
from .state import STATE
|
| 20 |
+
from .config import BRIDGE_BASE_URL
|
| 21 |
+
from .bridge import initialize_once
|
| 22 |
+
from .sse_transform import stream_openai_sse
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
router = APIRouter()
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
@router.get("/")
|
| 29 |
+
def root():
|
| 30 |
+
return {"service": "OpenAI Chat Completions (Warp bridge) - Streaming", "status": "ok"}
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
@router.get("/healthz")
|
| 34 |
+
def health_check():
|
| 35 |
+
return {"status": "ok", "service": "OpenAI Chat Completions (Warp bridge) - Streaming"}
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
@router.get("/v1/models")
|
| 39 |
+
def list_models():
|
| 40 |
+
"""OpenAI-compatible model listing. Forwards to bridge, with local fallback."""
|
| 41 |
+
try:
|
| 42 |
+
resp = requests.get(f"{BRIDGE_BASE_URL}/v1/models", timeout=10.0)
|
| 43 |
+
if resp.status_code != 200:
|
| 44 |
+
raise HTTPException(resp.status_code, f"bridge_error: {resp.text}")
|
| 45 |
+
return resp.json()
|
| 46 |
+
except Exception as e:
|
| 47 |
+
try:
|
| 48 |
+
# Local fallback: construct models directly if bridge is unreachable
|
| 49 |
+
from warp2protobuf.config.models import get_all_unique_models # type: ignore
|
| 50 |
+
models = get_all_unique_models()
|
| 51 |
+
return {"object": "list", "data": models}
|
| 52 |
+
except Exception:
|
| 53 |
+
raise HTTPException(502, f"bridge_unreachable: {e}")
|
| 54 |
+
|
| 55 |
+
|
| 56 |
+
@router.post("/v1/chat/completions")
|
| 57 |
+
async def chat_completions(req: ChatCompletionsRequest):
|
| 58 |
+
try:
|
| 59 |
+
initialize_once()
|
| 60 |
+
except Exception as e:
|
| 61 |
+
logger.warning(f"[OpenAI Compat] initialize_once failed or skipped: {e}")
|
| 62 |
+
|
| 63 |
+
if not req.messages:
|
| 64 |
+
raise HTTPException(400, "messages 不能为空")
|
| 65 |
+
|
| 66 |
+
# 1) 打印接收到的 Chat Completions 原始请求体
|
| 67 |
+
try:
|
| 68 |
+
logger.info("[OpenAI Compat] 接收到的 Chat Completions 请求体(原始): %s", json.dumps(req.dict(), ensure_ascii=False))
|
| 69 |
+
except Exception:
|
| 70 |
+
logger.info("[OpenAI Compat] 接收到的 Chat Completions 请求体(原始) 序列化失败")
|
| 71 |
+
|
| 72 |
+
# 整理消息
|
| 73 |
+
history: List[ChatMessage] = reorder_messages_for_anthropic(list(req.messages))
|
| 74 |
+
|
| 75 |
+
# 2) 打印整理后的请求体(post-reorder)
|
| 76 |
+
try:
|
| 77 |
+
logger.info("[OpenAI Compat] 整理后的请求体(post-reorder): %s", json.dumps({
|
| 78 |
+
**req.dict(),
|
| 79 |
+
"messages": [m.dict() for m in history]
|
| 80 |
+
}, ensure_ascii=False))
|
| 81 |
+
except Exception:
|
| 82 |
+
logger.info("[OpenAI Compat] 整理后的请求体(post-reorder) 序列化失败")
|
| 83 |
+
|
| 84 |
+
system_prompt_text: Optional[str] = None
|
| 85 |
+
try:
|
| 86 |
+
chunks: List[str] = []
|
| 87 |
+
for _m in history:
|
| 88 |
+
if _m.role == "system":
|
| 89 |
+
_txt = segments_to_text(normalize_content_to_list(_m.content))
|
| 90 |
+
if _txt.strip():
|
| 91 |
+
chunks.append(_txt)
|
| 92 |
+
if chunks:
|
| 93 |
+
system_prompt_text = "\n\n".join(chunks)
|
| 94 |
+
except Exception:
|
| 95 |
+
system_prompt_text = None
|
| 96 |
+
|
| 97 |
+
task_id = STATE.baseline_task_id or str(uuid.uuid4())
|
| 98 |
+
packet = packet_template()
|
| 99 |
+
packet["task_context"] = {
|
| 100 |
+
"tasks": [{
|
| 101 |
+
"id": task_id,
|
| 102 |
+
"description": "",
|
| 103 |
+
"status": {"in_progress": {}},
|
| 104 |
+
"messages": map_history_to_warp_messages(history, task_id, None, False),
|
| 105 |
+
}],
|
| 106 |
+
"active_task_id": task_id,
|
| 107 |
+
}
|
| 108 |
+
|
| 109 |
+
packet.setdefault("settings", {}).setdefault("model_config", {})
|
| 110 |
+
packet["settings"]["model_config"]["base"] = req.model or packet["settings"]["model_config"].get("base") or "claude-4.1-opus"
|
| 111 |
+
|
| 112 |
+
if STATE.conversation_id:
|
| 113 |
+
packet.setdefault("metadata", {})["conversation_id"] = STATE.conversation_id
|
| 114 |
+
|
| 115 |
+
attach_user_and_tools_to_inputs(packet, history, system_prompt_text)
|
| 116 |
+
|
| 117 |
+
if req.tools:
|
| 118 |
+
mcp_tools: List[Dict[str, Any]] = []
|
| 119 |
+
for t in req.tools:
|
| 120 |
+
if t.type != "function" or not t.function:
|
| 121 |
+
continue
|
| 122 |
+
mcp_tools.append({
|
| 123 |
+
"name": t.function.name,
|
| 124 |
+
"description": t.function.description or "",
|
| 125 |
+
"input_schema": t.function.parameters or {},
|
| 126 |
+
})
|
| 127 |
+
if mcp_tools:
|
| 128 |
+
packet.setdefault("mcp_context", {}).setdefault("tools", []).extend(mcp_tools)
|
| 129 |
+
|
| 130 |
+
# 3) 打印转换成 protobuf JSON 的请求体(发送到 bridge 的数据包)
|
| 131 |
+
try:
|
| 132 |
+
logger.info("[OpenAI Compat] 转换成 Protobuf JSON 的请求体: %s", json.dumps(packet, ensure_ascii=False))
|
| 133 |
+
except Exception:
|
| 134 |
+
logger.info("[OpenAI Compat] 转换成 Protobuf JSON 的请求体 序列化失败")
|
| 135 |
+
|
| 136 |
+
created_ts = int(time.time())
|
| 137 |
+
completion_id = str(uuid.uuid4())
|
| 138 |
+
model_id = req.model or "warp-default"
|
| 139 |
+
|
| 140 |
+
if req.stream:
|
| 141 |
+
async def _agen():
|
| 142 |
+
async for chunk in stream_openai_sse(packet, completion_id, created_ts, model_id):
|
| 143 |
+
yield chunk
|
| 144 |
+
return StreamingResponse(_agen(), media_type="text/event-stream", headers={"Cache-Control": "no-cache", "Connection": "keep-alive"})
|
| 145 |
+
|
| 146 |
+
def _post_once() -> requests.Response:
|
| 147 |
+
return requests.post(
|
| 148 |
+
f"{BRIDGE_BASE_URL}/api/warp/send_stream",
|
| 149 |
+
json={"json_data": packet, "message_type": "warp.multi_agent.v1.Request"},
|
| 150 |
+
timeout=(5.0, 180.0),
|
| 151 |
+
)
|
| 152 |
+
|
| 153 |
+
try:
|
| 154 |
+
resp = _post_once()
|
| 155 |
+
if resp.status_code == 429:
|
| 156 |
+
try:
|
| 157 |
+
r = requests.post(f"{BRIDGE_BASE_URL}/api/auth/refresh", timeout=10.0)
|
| 158 |
+
logger.warning("[OpenAI Compat] Bridge returned 429. Tried JWT refresh -> HTTP %s", getattr(r, 'status_code', 'N/A'))
|
| 159 |
+
except Exception as _e:
|
| 160 |
+
logger.warning("[OpenAI Compat] JWT refresh attempt failed after 429: %s", _e)
|
| 161 |
+
resp = _post_once()
|
| 162 |
+
if resp.status_code != 200:
|
| 163 |
+
raise HTTPException(resp.status_code, f"bridge_error: {resp.text}")
|
| 164 |
+
bridge_resp = resp.json()
|
| 165 |
+
except Exception as e:
|
| 166 |
+
raise HTTPException(502, f"bridge_unreachable: {e}")
|
| 167 |
+
|
| 168 |
+
try:
|
| 169 |
+
STATE.conversation_id = bridge_resp.get("conversation_id") or STATE.conversation_id
|
| 170 |
+
ret_task_id = bridge_resp.get("task_id")
|
| 171 |
+
if isinstance(ret_task_id, str) and ret_task_id:
|
| 172 |
+
STATE.baseline_task_id = ret_task_id
|
| 173 |
+
except Exception:
|
| 174 |
+
pass
|
| 175 |
+
|
| 176 |
+
tool_calls: List[Dict[str, Any]] = []
|
| 177 |
+
try:
|
| 178 |
+
parsed_events = bridge_resp.get("parsed_events", []) or []
|
| 179 |
+
for ev in parsed_events:
|
| 180 |
+
evd = ev.get("parsed_data") or ev.get("raw_data") or {}
|
| 181 |
+
client_actions = evd.get("client_actions") or evd.get("clientActions") or {}
|
| 182 |
+
actions = client_actions.get("actions") or client_actions.get("Actions") or []
|
| 183 |
+
for action in actions:
|
| 184 |
+
add_msgs = action.get("add_messages_to_task") or action.get("addMessagesToTask") or {}
|
| 185 |
+
if not isinstance(add_msgs, dict):
|
| 186 |
+
continue
|
| 187 |
+
for message in add_msgs.get("messages", []) or []:
|
| 188 |
+
tc = message.get("tool_call") or message.get("toolCall") or {}
|
| 189 |
+
call_mcp = tc.get("call_mcp_tool") or tc.get("callMcpTool") or {}
|
| 190 |
+
if isinstance(call_mcp, dict) and call_mcp.get("name"):
|
| 191 |
+
try:
|
| 192 |
+
args_obj = call_mcp.get("args", {}) or {}
|
| 193 |
+
args_str = json.dumps(args_obj, ensure_ascii=False)
|
| 194 |
+
except Exception:
|
| 195 |
+
args_str = "{}"
|
| 196 |
+
tool_calls.append({
|
| 197 |
+
"id": tc.get("tool_call_id") or str(uuid.uuid4()),
|
| 198 |
+
"type": "function",
|
| 199 |
+
"function": {"name": call_mcp.get("name"), "arguments": args_str},
|
| 200 |
+
})
|
| 201 |
+
except Exception:
|
| 202 |
+
pass
|
| 203 |
+
|
| 204 |
+
if tool_calls:
|
| 205 |
+
msg_payload = {"role": "assistant", "content": "", "tool_calls": tool_calls}
|
| 206 |
+
finish_reason = "tool_calls"
|
| 207 |
+
else:
|
| 208 |
+
response_text = bridge_resp.get("response", "")
|
| 209 |
+
msg_payload = {"role": "assistant", "content": response_text}
|
| 210 |
+
finish_reason = "stop"
|
| 211 |
+
|
| 212 |
+
final = {
|
| 213 |
+
"id": completion_id,
|
| 214 |
+
"object": "chat.completion",
|
| 215 |
+
"created": created_ts,
|
| 216 |
+
"model": model_id,
|
| 217 |
+
"choices": [{"index": 0, "message": msg_payload, "finish_reason": finish_reason}],
|
| 218 |
+
}
|
| 219 |
+
return final
|
protobuf2openai/sse_transform.py
ADDED
|
@@ -0,0 +1,345 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import json
|
| 4 |
+
import uuid
|
| 5 |
+
from typing import Any, AsyncGenerator, Dict
|
| 6 |
+
|
| 7 |
+
import httpx
|
| 8 |
+
from .logging import logger
|
| 9 |
+
|
| 10 |
+
from .config import BRIDGE_BASE_URL
|
| 11 |
+
from .helpers import _get
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
async def stream_openai_sse(packet: Dict[str, Any], completion_id: str, created_ts: int, model_id: str) -> AsyncGenerator[str, None]:
    """Translate the Warp bridge's protobuf SSE stream into OpenAI-style SSE.

    Opens a streaming POST against the bridge's ``/api/warp/send_stream_sse``
    endpoint, parses each bridge event, and yields OpenAI
    ``chat.completion.chunk`` SSE frames: an initial role chunk, content
    deltas, tool-call deltas, a finish chunk, and a terminal ``[DONE]``.

    On a 429 from the bridge it attempts one JWT refresh and retries the
    stream once.  Any exception is converted into a terminal error chunk
    followed by ``[DONE]`` so the client-side stream always terminates.

    NOTE(review): the retry branch duplicates the entire relay loop of the
    first-attempt path below; the two copies must be kept in sync manually.
    """
    try:
        # Initial chunk announcing the assistant role (OpenAI convention).
        first = {
            "id": completion_id,
            "object": "chat.completion.chunk",
            "created": created_ts,
            "model": model_id,
            "choices": [{"index": 0, "delta": {"role": "assistant"}}],
        }
        # Log the first converted SSE event (OpenAI format); all logging here
        # is best-effort and must never break the stream.
        try:
            logger.info("[OpenAI Compat] 转换后的 SSE(emit): %s", json.dumps(first, ensure_ascii=False))
        except Exception:
            pass
        yield f"data: {json.dumps(first, ensure_ascii=False)}\n\n"

        timeout = httpx.Timeout(60.0)
        async with httpx.AsyncClient(http2=True, timeout=timeout, trust_env=True) as client:
            def _do_stream():
                # Build (do not yet enter) one streaming POST to the bridge.
                return client.stream(
                    "POST",
                    f"{BRIDGE_BASE_URL}/api/warp/send_stream_sse",
                    headers={"accept": "text/event-stream"},
                    json={"json_data": packet, "message_type": "warp.multi_agent.v1.Request"},
                )

            # First request.
            response_cm = _do_stream()
            async with response_cm as response:
                if response.status_code == 429:
                    # Rate-limited: best-effort JWT refresh, then retry once.
                    try:
                        r = await client.post(f"{BRIDGE_BASE_URL}/api/auth/refresh", timeout=10.0)
                        logger.warning("[OpenAI Compat] Bridge returned 429. Tried JWT refresh -> HTTP %s", r.status_code)
                    except Exception as _e:
                        logger.warning("[OpenAI Compat] JWT refresh attempt failed after 429: %s", _e)
                    # Retry once.
                    response_cm2 = _do_stream()
                    async with response_cm2 as response2:
                        response = response2
                        if response.status_code != 200:
                            error_text = await response.aread()
                            error_content = error_text.decode("utf-8") if error_text else ""
                            logger.error(f"[OpenAI Compat] Bridge HTTP error {response.status_code}: {error_content[:300]}")
                            raise RuntimeError(f"bridge error: {error_content}")
                        # `current` accumulates the (possibly multi-line) data
                        # payload of one SSE event until a blank line flushes it.
                        current = ""
                        tool_calls_emitted = False
                        async for line in response.aiter_lines():
                            if line.startswith("data:"):
                                payload = line[5:].strip()
                                if not payload:
                                    continue
                                # Log the raw protobuf SSE data fragment.
                                try:
                                    logger.info("[OpenAI Compat] 接收到的 Protobuf SSE(data): %s", payload)
                                except Exception:
                                    pass
                                if payload == "[DONE]":
                                    break
                                current += payload
                                continue
                            if (line.strip() == "") and current:
                                # Blank line ends the SSE event: parse the buffer.
                                try:
                                    ev = json.loads(current)
                                except Exception:
                                    current = ""
                                    continue
                                current = ""
                                event_data = (ev or {}).get("parsed_data") or {}

                                # Log the parsed protobuf event.
                                try:
                                    logger.info("[OpenAI Compat] 接收到的 Protobuf 事件(parsed): %s", json.dumps(event_data, ensure_ascii=False))
                                except Exception:
                                    pass

                                # "init" events carry no client-visible output.
                                if "init" in event_data:
                                    pass

                                # Bridge keys may arrive snake_case or camelCase.
                                client_actions = _get(event_data, "client_actions", "clientActions")
                                if isinstance(client_actions, dict):
                                    actions = _get(client_actions, "actions", "Actions") or []
                                    for action in actions:
                                        # Incremental text append -> content delta.
                                        append_data = _get(action, "append_to_message_content", "appendToMessageContent")
                                        if isinstance(append_data, dict):
                                            message = append_data.get("message", {})
                                            agent_output = _get(message, "agent_output", "agentOutput") or {}
                                            text_content = agent_output.get("text", "")
                                            if text_content:
                                                delta = {
                                                    "id": completion_id,
                                                    "object": "chat.completion.chunk",
                                                    "created": created_ts,
                                                    "model": model_id,
                                                    "choices": [{"index": 0, "delta": {"content": text_content}}],
                                                }
                                                # Log the converted OpenAI SSE event.
                                                try:
                                                    logger.info("[OpenAI Compat] 转换后的 SSE(emit): %s", json.dumps(delta, ensure_ascii=False))
                                                except Exception:
                                                    pass
                                                yield f"data: {json.dumps(delta, ensure_ascii=False)}\n\n"

                                        # Whole messages -> tool calls or text.
                                        messages_data = _get(action, "add_messages_to_task", "addMessagesToTask")
                                        if isinstance(messages_data, dict):
                                            messages = messages_data.get("messages", [])
                                            for message in messages:
                                                tool_call = _get(message, "tool_call", "toolCall") or {}
                                                call_mcp = _get(tool_call, "call_mcp_tool", "callMcpTool") or {}
                                                if isinstance(call_mcp, dict) and call_mcp.get("name"):
                                                    # Arguments are re-serialized as a
                                                    # JSON string (OpenAI tool format).
                                                    try:
                                                        args_obj = call_mcp.get("args", {}) or {}
                                                        args_str = json.dumps(args_obj, ensure_ascii=False)
                                                    except Exception:
                                                        args_str = "{}"
                                                    tool_call_id = tool_call.get("tool_call_id") or str(uuid.uuid4())
                                                    delta = {
                                                        "id": completion_id,
                                                        "object": "chat.completion.chunk",
                                                        "created": created_ts,
                                                        "model": model_id,
                                                        "choices": [{
                                                            "index": 0,
                                                            "delta": {
                                                                "tool_calls": [{
                                                                    "index": 0,
                                                                    "id": tool_call_id,
                                                                    "type": "function",
                                                                    "function": {"name": call_mcp.get("name"), "arguments": args_str},
                                                                }]
                                                            }
                                                        }],
                                                    }
                                                    # Log the converted OpenAI tool-call event.
                                                    try:
                                                        logger.info("[OpenAI Compat] 转换后的 SSE(emit tool_calls): %s", json.dumps(delta, ensure_ascii=False))
                                                    except Exception:
                                                        pass
                                                    yield f"data: {json.dumps(delta, ensure_ascii=False)}\n\n"
                                                    tool_calls_emitted = True
                                                else:
                                                    agent_output = _get(message, "agent_output", "agentOutput") or {}
                                                    text_content = agent_output.get("text", "")
                                                    if text_content:
                                                        delta = {
                                                            "id": completion_id,
                                                            "object": "chat.completion.chunk",
                                                            "created": created_ts,
                                                            "model": model_id,
                                                            "choices": [{"index": 0, "delta": {"content": text_content}}],
                                                        }
                                                        try:
                                                            logger.info("[OpenAI Compat] 转换后的 SSE(emit): %s", json.dumps(delta, ensure_ascii=False))
                                                        except Exception:
                                                            pass
                                                        yield f"data: {json.dumps(delta, ensure_ascii=False)}\n\n"

                                # "finished" event -> emit the finish chunk; the
                                # reason reflects whether tool calls were sent.
                                if "finished" in event_data:
                                    done_chunk = {
                                        "id": completion_id,
                                        "object": "chat.completion.chunk",
                                        "created": created_ts,
                                        "model": model_id,
                                        "choices": [{"index": 0, "delta": {}, "finish_reason": ("tool_calls" if tool_calls_emitted else "stop")}],
                                    }
                                    try:
                                        logger.info("[OpenAI Compat] 转换后的 SSE(emit done): %s", json.dumps(done_chunk, ensure_ascii=False))
                                    except Exception:
                                        pass
                                    yield f"data: {json.dumps(done_chunk, ensure_ascii=False)}\n\n"

                        # Log the completion marker and terminate the stream;
                        # the return skips the non-retry relay loop below.
                        try:
                            logger.info("[OpenAI Compat] 转换后的 SSE(emit): [DONE]")
                        except Exception:
                            pass
                        yield "data: [DONE]\n\n"
                        return

                if response.status_code != 200:
                    error_text = await response.aread()
                    error_content = error_text.decode("utf-8") if error_text else ""
                    logger.error(f"[OpenAI Compat] Bridge HTTP error {response.status_code}: {error_content[:300]}")
                    raise RuntimeError(f"bridge error: {error_content}")

                # ---- First-attempt relay loop (duplicate of the retry path) ----
                current = ""
                tool_calls_emitted = False
                async for line in response.aiter_lines():
                    if line.startswith("data:"):
                        payload = line[5:].strip()
                        if not payload:
                            continue
                        # Log the raw protobuf SSE data fragment.
                        try:
                            logger.info("[OpenAI Compat] 接收到的 Protobuf SSE(data): %s", payload)
                        except Exception:
                            pass
                        if payload == "[DONE]":
                            break
                        current += payload
                        continue
                    if (line.strip() == "") and current:
                        # Blank line ends the SSE event: parse the buffer.
                        try:
                            ev = json.loads(current)
                        except Exception:
                            current = ""
                            continue
                        current = ""
                        event_data = (ev or {}).get("parsed_data") or {}

                        # Log the parsed protobuf event.
                        try:
                            logger.info("[OpenAI Compat] 接收到的 Protobuf 事件(parsed): %s", json.dumps(event_data, ensure_ascii=False))
                        except Exception:
                            pass

                        # "init" events carry no client-visible output.
                        if "init" in event_data:
                            pass

                        # Bridge keys may arrive snake_case or camelCase.
                        client_actions = _get(event_data, "client_actions", "clientActions")
                        if isinstance(client_actions, dict):
                            actions = _get(client_actions, "actions", "Actions") or []
                            for action in actions:
                                # Incremental text append -> content delta.
                                append_data = _get(action, "append_to_message_content", "appendToMessageContent")
                                if isinstance(append_data, dict):
                                    message = append_data.get("message", {})
                                    agent_output = _get(message, "agent_output", "agentOutput") or {}
                                    text_content = agent_output.get("text", "")
                                    if text_content:
                                        delta = {
                                            "id": completion_id,
                                            "object": "chat.completion.chunk",
                                            "created": created_ts,
                                            "model": model_id,
                                            "choices": [{"index": 0, "delta": {"content": text_content}}],
                                        }
                                        # Log the converted OpenAI SSE event.
                                        try:
                                            logger.info("[OpenAI Compat] 转换后的 SSE(emit): %s", json.dumps(delta, ensure_ascii=False))
                                        except Exception:
                                            pass
                                        yield f"data: {json.dumps(delta, ensure_ascii=False)}\n\n"

                                # Whole messages -> tool calls or text.
                                messages_data = _get(action, "add_messages_to_task", "addMessagesToTask")
                                if isinstance(messages_data, dict):
                                    messages = messages_data.get("messages", [])
                                    for message in messages:
                                        tool_call = _get(message, "tool_call", "toolCall") or {}
                                        call_mcp = _get(tool_call, "call_mcp_tool", "callMcpTool") or {}
                                        if isinstance(call_mcp, dict) and call_mcp.get("name"):
                                            # Arguments are re-serialized as a JSON
                                            # string (OpenAI tool format).
                                            try:
                                                args_obj = call_mcp.get("args", {}) or {}
                                                args_str = json.dumps(args_obj, ensure_ascii=False)
                                            except Exception:
                                                args_str = "{}"
                                            tool_call_id = tool_call.get("tool_call_id") or str(uuid.uuid4())
                                            delta = {
                                                "id": completion_id,
                                                "object": "chat.completion.chunk",
                                                "created": created_ts,
                                                "model": model_id,
                                                "choices": [{
                                                    "index": 0,
                                                    "delta": {
                                                        "tool_calls": [{
                                                            "index": 0,
                                                            "id": tool_call_id,
                                                            "type": "function",
                                                            "function": {"name": call_mcp.get("name"), "arguments": args_str},
                                                        }]
                                                    }
                                                }],
                                            }
                                            # Log the converted OpenAI tool-call event.
                                            try:
                                                logger.info("[OpenAI Compat] 转换后的 SSE(emit tool_calls): %s", json.dumps(delta, ensure_ascii=False))
                                            except Exception:
                                                pass
                                            yield f"data: {json.dumps(delta, ensure_ascii=False)}\n\n"
                                            tool_calls_emitted = True
                                        else:
                                            agent_output = _get(message, "agent_output", "agentOutput") or {}
                                            text_content = agent_output.get("text", "")
                                            if text_content:
                                                delta = {
                                                    "id": completion_id,
                                                    "object": "chat.completion.chunk",
                                                    "created": created_ts,
                                                    "model": model_id,
                                                    "choices": [{"index": 0, "delta": {"content": text_content}}],
                                                }
                                                try:
                                                    logger.info("[OpenAI Compat] 转换后的 SSE(emit): %s", json.dumps(delta, ensure_ascii=False))
                                                except Exception:
                                                    pass
                                                yield f"data: {json.dumps(delta, ensure_ascii=False)}\n\n"

                        # "finished" event -> emit the finish chunk.
                        if "finished" in event_data:
                            done_chunk = {
                                "id": completion_id,
                                "object": "chat.completion.chunk",
                                "created": created_ts,
                                "model": model_id,
                                "choices": [{"index": 0, "delta": {}, "finish_reason": ("tool_calls" if tool_calls_emitted else "stop")}],
                            }
                            try:
                                logger.info("[OpenAI Compat] 转换后的 SSE(emit done): %s", json.dumps(done_chunk, ensure_ascii=False))
                            except Exception:
                                pass
                            yield f"data: {json.dumps(done_chunk, ensure_ascii=False)}\n\n"

                # Log the completion marker and close the OpenAI stream.
                try:
                    logger.info("[OpenAI Compat] 转换后的 SSE(emit): [DONE]")
                except Exception:
                    pass
                yield "data: [DONE]\n\n"
    except Exception as e:
        # Any failure becomes a terminal error chunk + [DONE] so the client
        # never hangs waiting for more data.
        logger.error(f"[OpenAI Compat] Stream processing failed: {e}")
        error_chunk = {
            "id": completion_id,
            "object": "chat.completion.chunk",
            "created": created_ts,
            "model": model_id,
            "choices": [{"index": 0, "delta": {}, "finish_reason": "error"}],
            "error": {"message": str(e)},
        }
        try:
            logger.info("[OpenAI Compat] 转换后的 SSE(emit error): %s", json.dumps(error_chunk, ensure_ascii=False))
        except Exception:
            pass
        yield f"data: {json.dumps(error_chunk, ensure_ascii=False)}\n\n"
        yield "data: [DONE]\n\n"
|
protobuf2openai/state.py
ADDED
|
@@ -0,0 +1,23 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import uuid
|
| 4 |
+
from typing import Optional
|
| 5 |
+
from pydantic import BaseModel
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
class BridgeState(BaseModel):
    """Mutable per-process session state shared by the OpenAI-compat bridge."""

    # Warp conversation id returned by the bridge; None until the first reply.
    conversation_id: Optional[str] = None
    # Task id returned by the bridge, reused as the baseline for later turns.
    baseline_task_id: Optional[str] = None
    # Lazily-assigned identifiers for synthesized tool calls
    # (populated by ensure_tool_ids on first use).
    tool_call_id: Optional[str] = None
    tool_message_id: Optional[str] = None
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
STATE = BridgeState()
|
| 16 |
+
|
| 17 |
+
# Initialize tool ids lazily when needed
|
| 18 |
+
|
| 19 |
+
def ensure_tool_ids():
    """Lazily populate the shared tool-call identifiers on first use."""
    for attr in ("tool_call_id", "tool_message_id"):
        if not getattr(STATE, attr):
            setattr(STATE, attr, str(uuid.uuid4()))
|
pyproject.toml
ADDED
|
@@ -0,0 +1,25 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
[project]
|
| 2 |
+
name = "warptestui"
|
| 3 |
+
version = "0.1.0"
|
| 4 |
+
description = "Test UI for Warp"
|
| 5 |
+
readme = "README.md"
|
| 6 |
+
requires-python = ">=3.13"
|
| 7 |
+
dependencies = [
|
| 8 |
+
"fastapi[standard]",
|
| 9 |
+
"uvicorn[standard]",
|
| 10 |
+
"httpx[http2]",
|
| 11 |
+
"protobuf",
|
| 12 |
+
"grpcio-tools",
|
| 13 |
+
"python-dotenv",
|
| 14 |
+
"websockets>=15.0.1",
|
| 15 |
+
"requests>=2.32.5",
|
| 16 |
+
"openai>=1.106.0",
|
| 17 |
+
]
|
| 18 |
+
|
| 19 |
+
[project.scripts]
|
| 20 |
+
warp-server = "server:main"
|
| 21 |
+
warp-test = "main:main"
|
| 22 |
+
|
| 23 |
+
[[tool.uv.index]]
|
| 24 |
+
url = "https://mirrors.ustc.edu.cn/pypi/simple"
|
| 25 |
+
default = true
|
server.py
ADDED
|
@@ -0,0 +1,585 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/env python3
|
| 2 |
+
# -*- coding: utf-8 -*-
|
| 3 |
+
"""
|
| 4 |
+
Warp Protobuf编解码服务器启动文件
|
| 5 |
+
|
| 6 |
+
纯protobuf编解码服务器,提供JSON<->Protobuf转换、WebSocket监控和静态文件服务。
|
| 7 |
+
"""
|
| 8 |
+
|
| 9 |
+
from typing import Dict, Optional, Tuple
|
| 10 |
+
import base64
|
| 11 |
+
from pathlib import Path
|
| 12 |
+
from contextlib import asynccontextmanager
|
| 13 |
+
|
| 14 |
+
import uvicorn
|
| 15 |
+
from fastapi import FastAPI
|
| 16 |
+
from fastapi.staticfiles import StaticFiles
|
| 17 |
+
from fastapi.responses import HTMLResponse
|
| 18 |
+
from fastapi import Query, HTTPException
|
| 19 |
+
from fastapi.responses import Response
|
| 20 |
+
|
| 21 |
+
# 新增:类型导入
|
| 22 |
+
from typing import Any
|
| 23 |
+
|
| 24 |
+
from warp2protobuf.api.protobuf_routes import app as protobuf_app
|
| 25 |
+
from warp2protobuf.core.logging import logger, set_log_file
|
| 26 |
+
from warp2protobuf.api.protobuf_routes import EncodeRequest, _encode_smd_inplace
|
| 27 |
+
from warp2protobuf.core.protobuf_utils import dict_to_protobuf_bytes
|
| 28 |
+
from warp2protobuf.core.schema_sanitizer import sanitize_mcp_input_schema_in_packet
|
| 29 |
+
from warp2protobuf.core.auth import acquire_anonymous_access_token
|
| 30 |
+
from warp2protobuf.config.models import get_all_unique_models
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
# ============= 工具:input_schema 清理与校验 =============
|
| 34 |
+
|
| 35 |
+
|
| 36 |
+
def _is_empty_value(value: Any) -> bool:
|
| 37 |
+
if value is None:
|
| 38 |
+
return True
|
| 39 |
+
if isinstance(value, str) and value.strip() == "":
|
| 40 |
+
return True
|
| 41 |
+
if isinstance(value, (list, dict)) and len(value) == 0:
|
| 42 |
+
return True
|
| 43 |
+
return False
|
| 44 |
+
|
| 45 |
+
|
| 46 |
+
def _deep_clean(value: Any) -> Any:
|
| 47 |
+
if isinstance(value, dict):
|
| 48 |
+
cleaned: Dict[str, Any] = {}
|
| 49 |
+
for k, v in value.items():
|
| 50 |
+
vv = _deep_clean(v)
|
| 51 |
+
if _is_empty_value(vv):
|
| 52 |
+
continue
|
| 53 |
+
cleaned[k] = vv
|
| 54 |
+
return cleaned
|
| 55 |
+
if isinstance(value, list):
|
| 56 |
+
cleaned_list = []
|
| 57 |
+
for item in value:
|
| 58 |
+
ii = _deep_clean(item)
|
| 59 |
+
if _is_empty_value(ii):
|
| 60 |
+
continue
|
| 61 |
+
cleaned_list.append(ii)
|
| 62 |
+
return cleaned_list
|
| 63 |
+
if isinstance(value, str):
|
| 64 |
+
return value.strip()
|
| 65 |
+
return value
|
| 66 |
+
|
| 67 |
+
|
| 68 |
+
def _infer_type_for_property(prop_name: str) -> str:
|
| 69 |
+
name = prop_name.lower()
|
| 70 |
+
if name in ("url", "uri", "href", "link"):
|
| 71 |
+
return "string"
|
| 72 |
+
if name in ("headers", "options", "params", "payload", "data"):
|
| 73 |
+
return "object"
|
| 74 |
+
return "string"
|
| 75 |
+
|
| 76 |
+
|
| 77 |
+
def _ensure_property_schema(name: str, schema: Dict[str, Any]) -> Dict[str, Any]:
    """Normalize one JSON-schema property definition.

    Guarantees a non-empty ``type`` and ``description``, applies special
    handling for a property named ``headers`` (forced to an object with
    non-empty sub-properties), and strips invalid ``required`` /
    ``additionalProperties`` entries.

    Args:
        name: The property's key in the parent ``properties`` map.
        schema: The raw sub-schema (non-dict inputs are treated as empty).

    Returns:
        A cleaned copy of ``schema``; the input is not mutated.
    """
    # Work on a shallow copy, then recursively drop empty members.
    prop = dict(schema) if isinstance(schema, dict) else {}
    prop = _deep_clean(prop)

    # Required keys: type & description.  Missing, non-string, or blank
    # values are replaced with inferred/placeholder ones.
    if (
        "type" not in prop
        or not isinstance(prop.get("type"), str)
        or not prop["type"].strip()
    ):
        prop["type"] = _infer_type_for_property(name)
    if (
        "description" not in prop
        or not isinstance(prop.get("description"), str)
        or not prop["description"].strip()
    ):
        prop["description"] = f"{name} parameter"

    # Special case "headers": must be an object, and its properties map
    # must not be empty (an empty map is replaced with a default
    # user-agent entry).
    if name.lower() == "headers":
        prop["type"] = "object"
        headers_props = prop.get("properties")
        if not isinstance(headers_props, dict):
            headers_props = {}
        headers_props = _deep_clean(headers_props)
        if not headers_props:
            headers_props = {
                "user-agent": {
                    "type": "string",
                    "description": "User-Agent header for the request",
                }
            }
        else:
            # Clean each header sub-property and guarantee type/description.
            fixed_headers: Dict[str, Any] = {}
            for hk, hv in headers_props.items():
                sub = _deep_clean(hv if isinstance(hv, dict) else {})
                if (
                    "type" not in sub
                    or not isinstance(sub.get("type"), str)
                    or not sub["type"].strip()
                ):
                    sub["type"] = "string"
                if (
                    "description" not in sub
                    or not isinstance(sub.get("description"), str)
                    or not sub["description"].strip()
                ):
                    sub["description"] = f"{hk} header"
                fixed_headers[hk] = sub
            headers_props = fixed_headers
        prop["properties"] = headers_props
        # "required" must only list keys that exist; an empty result is
        # removed entirely (empty arrays are invalid downstream).
        if isinstance(prop.get("required"), list):
            req = [
                r for r in prop["required"] if isinstance(r, str) and r in headers_props
            ]
            if req:
                prop["required"] = req
            else:
                prop.pop("required", None)
    # additionalProperties: drop an empty dict; keep explicit True/False.
    if (
        isinstance(prop.get("additionalProperties"), dict)
        and len(prop["additionalProperties"]) == 0
    ):
        prop.pop("additionalProperties", None)

    return prop
|
| 146 |
+
|
| 147 |
+
|
| 148 |
+
def _sanitize_json_schema(schema: Dict[str, Any]) -> Dict[str, Any]:
    """Sanitize a whole JSON-schema dict for downstream validation.

    Deep-cleans empty members, guarantees a top-level ``type`` when
    ``properties`` exists, fixes ``$schema``, normalizes every property via
    :func:`_ensure_property_schema`, and strips invalid ``required`` /
    ``additionalProperties`` entries.

    Args:
        schema: Raw schema dict (non-dict inputs are treated as empty).

    Returns:
        A cleaned copy; the input is not mutated.
    """
    s = _deep_clean(schema if isinstance(schema, dict) else {})

    # If properties exist, the top level should be an object.
    if "properties" in s and not isinstance(s.get("type"), str):
        s["type"] = "object"

    # Fix $schema: drop a non-string value, then default to draft-07.
    if "$schema" in s and not isinstance(s["$schema"], str):
        s.pop("$schema", None)
    if "$schema" not in s:
        s["$schema"] = "http://json-schema.org/draft-07/schema#"

    # Normalize every declared property.
    properties = s.get("properties")
    if isinstance(properties, dict):
        fixed_props: Dict[str, Any] = {}
        for name, subschema in properties.items():
            fixed_props[name] = _ensure_property_schema(
                name, subschema if isinstance(subschema, dict) else {}
            )
        s["properties"] = fixed_props

    # required: remove names not present in properties, and never keep an
    # empty list.
    if isinstance(s.get("required"), list):
        if isinstance(properties, dict):
            req = [r for r in s["required"] if isinstance(r, str) and r in properties]
        else:
            req = []
        if req:
            s["required"] = req
        else:
            s.pop("required", None)

    # additionalProperties: an empty dict is treated as invalid and removed.
    if (
        isinstance(s.get("additionalProperties"), dict)
        and len(s["additionalProperties"]) == 0
    ):
        s.pop("additionalProperties", None)

    return s
|
| 189 |
+
|
| 190 |
+
|
| 191 |
+
class _InputSchemaSanitizerMiddleware:  # deprecated; use sanitize_mcp_input_schema_in_packet in handlers
    """Deprecated, empty placeholder with no behavior.

    Superseded by ``sanitize_mcp_input_schema_in_packet`` applied directly
    in the request handlers (see trailing comment above).
    """
    pass
|
| 193 |
+
|
| 194 |
+
|
| 195 |
+
# ============= 应用创建 =============
|
| 196 |
+
|
| 197 |
+
|
| 198 |
+
@asynccontextmanager
async def lifespan(app: FastAPI):
    """Application lifespan manager: run startup tasks on boot.

    Nothing is currently done on shutdown.
    """
    # Executed at startup
    await startup_tasks()
    yield
    # Executed at shutdown (no cleanup needed currently)
    pass
|
| 206 |
+
|
| 207 |
+
|
| 208 |
+
def create_app() -> FastAPI:
    """Build and configure the main FastAPI application.

    Sets up file logging, optional static/GUI routes, the raw protobuf
    encode endpoint, the OpenAI-compatible model list, and finally mounts
    the protobuf sub-application at the root.

    Returns:
        The fully configured FastAPI app.
    """
    # Redirect server logs to a dedicated file; failure here is non-fatal.
    try:
        set_log_file("warp_server.log")
    except Exception:
        pass

    # Main application; lifespan runs startup_tasks() when uvicorn boots it.
    app = FastAPI(lifespan=lifespan)

    # Static file directory and GUI routes.
    static_dir = Path("static")
    if static_dir.exists():
        # Serve static assets.
        app.mount("/static", StaticFiles(directory="static"), name="static")
        logger.info("✅ 静态文件服务已启用: /static")

        @app.get("/gui", response_class=HTMLResponse)
        async def serve_gui():
            """提供前端GUI界面"""
            index_file = static_dir / "index.html"
            if index_file.exists():
                return HTMLResponse(content=index_file.read_text(encoding="utf-8"))
            else:
                return HTMLResponse(
                    content="""
                <html>
                <body>
                    <h1>前端界面文件未找到</h1>
                    <p>请确保 static/index.html 文件存在</p>
                </body>
                </html>
                """
                )
    else:
        logger.warning("静态文件目录不存在,GUI界面将不可用")

        @app.get("/gui", response_class=HTMLResponse)
        async def no_gui():
            return HTMLResponse(
                content="""
            <html>
            <body>
                <h1>GUI界面未安装</h1>
                <p>静态文件目录 'static' 不存在</p>
                <p>请创建前端界面文件</p>
            </body>
            </html>
            """
            )

    # ============= Endpoint: return protobuf-encoded AI request bytes =============
    @app.post("/api/warp/encode_raw")
    async def encode_ai_request_raw(
        request: EncodeRequest,
        output: str = Query(
            "raw",
            description="输出格式:raw(默认,返回application/x-protobuf字节) 或 base64",
            regex=r"^(raw|base64)$",
        ),
    ):
        try:
            # Fetch the actual payload and validate it.
            actual_data = request.get_data()
            if not actual_data:
                raise HTTPException(400, "数据包不能为空")

            # Before encoding, safely sanitize mcp_context.tools[*].input_schema.
            if isinstance(actual_data, dict):
                wrapped = {"json_data": actual_data}
                wrapped = sanitize_mcp_input_schema_in_packet(wrapped)
                actual_data = wrapped.get("json_data", actual_data)

            # Encode any server_message_data objects into Base64URL strings.
            actual_data = _encode_smd_inplace(actual_data)

            # Encode to protobuf bytes.
            protobuf_bytes = dict_to_protobuf_bytes(actual_data, request.message_type)
            logger.info(f"✅ AI请求编码为protobuf成功: {len(protobuf_bytes)} 字节")

            if output == "raw":
                # Return the raw binary protobuf content.
                return Response(
                    content=protobuf_bytes,
                    media_type="application/x-protobuf",
                    headers={"Content-Length": str(len(protobuf_bytes))},
                )
            else:
                # Return base64 text for easy transport/debugging in JSON.
                # NOTE: base64 is already imported at module level; the
                # previous redundant local import was removed.
                return {
                    "protobuf_base64": base64.b64encode(protobuf_bytes).decode("utf-8"),
                    "size": len(protobuf_bytes),
                    "message_type": request.message_type,
                }
        except HTTPException:
            raise
        except Exception as e:
            logger.error(f"❌ AI请求编码失败: {e}")
            raise HTTPException(500, f"编码失败: {str(e)}")

    # ============= OpenAI compatibility: model list endpoint =============
    @app.get("/v1/models")
    async def list_models():
        """OpenAI-compatible endpoint that lists available models."""
        try:
            models = get_all_unique_models()
            return {"object": "list", "data": models}
        except Exception as e:
            logger.error(f"❌ 获取模型列表失败: {e}")
            raise HTTPException(500, f"获取模型列表失败: {str(e)}")

    # BUG FIX: the protobuf sub-app used to be mounted at "/" BEFORE the
    # routes above were registered.  Starlette matches routes in
    # registration order, so a root mount registered first shadows every
    # later route (/gui, /api/warp/encode_raw, /v1/models were
    # unreachable).  Mounting last keeps those routes reachable while still
    # delegating everything else to the protobuf app.
    app.mount("/", protobuf_app)

    return app
|
| 329 |
+
|
| 330 |
+
|
| 331 |
+
############################################################
|
| 332 |
+
# server_message_data 深度编解码工具
|
| 333 |
+
############################################################
|
| 334 |
+
|
| 335 |
+
# 说明:
|
| 336 |
+
# 根据抓包与分析,server_message_data 是 Base64URL 编码的 proto3 消息:
|
| 337 |
+
# - 字段 1:string(通常为 36 字节 UUID)
|
| 338 |
+
# - 字段 3:google.protobuf.Timestamp(字段1=seconds,字段2=nanos)
|
| 339 |
+
# 可能出现:仅 Timestamp、仅 UUID、或 UUID + Timestamp。
|
| 340 |
+
|
| 341 |
+
try:
|
| 342 |
+
from zoneinfo import ZoneInfo # Python 3.9+
|
| 343 |
+
except Exception:
|
| 344 |
+
ZoneInfo = None # type: ignore
|
| 345 |
+
|
| 346 |
+
|
| 347 |
+
def _b64url_decode_padded(s: str) -> bytes:
|
| 348 |
+
t = s.replace("-", "+").replace("_", "/")
|
| 349 |
+
pad = (-len(t)) % 4
|
| 350 |
+
if pad:
|
| 351 |
+
t += "=" * pad
|
| 352 |
+
return base64.b64decode(t)
|
| 353 |
+
|
| 354 |
+
|
| 355 |
+
def _b64url_encode_nopad(b: bytes) -> str:
|
| 356 |
+
return base64.urlsafe_b64encode(b).decode("ascii").rstrip("=")
|
| 357 |
+
|
| 358 |
+
|
| 359 |
+
def _read_varint(buf: bytes, i: int) -> Tuple[int, int]:
|
| 360 |
+
shift = 0
|
| 361 |
+
val = 0
|
| 362 |
+
while i < len(buf):
|
| 363 |
+
b = buf[i]
|
| 364 |
+
i += 1
|
| 365 |
+
val |= (b & 0x7F) << shift
|
| 366 |
+
if not (b & 0x80):
|
| 367 |
+
return val, i
|
| 368 |
+
shift += 7
|
| 369 |
+
if shift > 63:
|
| 370 |
+
break
|
| 371 |
+
raise ValueError("invalid varint")
|
| 372 |
+
|
| 373 |
+
|
| 374 |
+
def _write_varint(v: int) -> bytes:
|
| 375 |
+
out = bytearray()
|
| 376 |
+
vv = int(v)
|
| 377 |
+
while True:
|
| 378 |
+
to_write = vv & 0x7F
|
| 379 |
+
vv >>= 7
|
| 380 |
+
if vv:
|
| 381 |
+
out.append(to_write | 0x80)
|
| 382 |
+
else:
|
| 383 |
+
out.append(to_write)
|
| 384 |
+
break
|
| 385 |
+
return bytes(out)
|
| 386 |
+
|
| 387 |
+
|
| 388 |
+
def _make_key(field_no: int, wire_type: int) -> bytes:
    """Encode a protobuf field key (tag): (field_no << 3) | wire_type, as a varint."""
    tag = (field_no << 3) | wire_type
    return _write_varint(tag)
|
| 390 |
+
|
| 391 |
+
|
| 392 |
+
def _decode_timestamp(buf: bytes) -> Tuple[Optional[int], Optional[int]]:
    """Decode a serialized google.protobuf.Timestamp message.

    Wire layout: field 1 = seconds (int64 varint), field 2 = nanos
    (int32 varint).  Unknown fields are skipped; an unrecognized wire type
    terminates parsing early.

    Returns:
        (seconds, nanos); either may be None if absent from the buffer.
    """
    # google.protobuf.Timestamp: field 1 = seconds (int64 varint), field 2 = nanos (int32 varint)
    i = 0
    seconds: Optional[int] = None
    nanos: Optional[int] = None
    while i < len(buf):
        key, i = _read_varint(buf, i)
        field_no = key >> 3
        wt = key & 0x07
        if wt == 0:  # varint
            val, i = _read_varint(buf, i)
            if field_no == 1:
                seconds = int(val)
            elif field_no == 2:
                nanos = int(val)
        elif wt == 2:  # length-delimited (not expected inside Timestamp)
            ln, i2 = _read_varint(buf, i)
            i = i2 + ln
        elif wt == 1:  # 64-bit fixed: skip 8 bytes
            i += 8
        elif wt == 5:  # 32-bit fixed: skip 4 bytes
            i += 4
        else:
            # Unknown wire type: stop rather than misparse.
            break
    return seconds, nanos
|
| 417 |
+
|
| 418 |
+
|
| 419 |
+
def _encode_timestamp(seconds: Optional[int], nanos: Optional[int]) -> bytes:
    """Serialize a google.protobuf.Timestamp (field 1 = seconds, field 2 = nanos).

    Fields whose value is None are omitted entirely.
    """
    out = bytearray()
    for field_no, value in ((1, seconds), (2, nanos)):
        if value is not None:
            out += _make_key(field_no, 0)  # varint wire type
            out += _write_varint(int(value))
    return bytes(out)
|
| 428 |
+
|
| 429 |
+
|
| 430 |
+
def decode_server_message_data(b64url: str) -> Dict:
    """解码 Base64URL 的 server_message_data,返回结构化信息。

    Decode a Base64URL-encoded server_message_data proto into a dict.

    Wire layout (from traffic analysis, per module comment above):
    field 1 = UUID string, field 3 = google.protobuf.Timestamp.

    Returns:
        Dict with any of the keys "uuid", "seconds", "nanos" that were
        present; on a base64 failure, {"error": ..., "raw_b64url": ...}.
    """
    try:
        raw = _b64url_decode_padded(b64url)
    except Exception as e:
        # Invalid base64: report instead of raising so callers can log it.
        return {"error": f"base64url decode failed: {e}", "raw_b64url": b64url}

    i = 0
    uuid: Optional[str] = None
    seconds: Optional[int] = None
    nanos: Optional[int] = None

    while i < len(raw):
        key, i = _read_varint(raw, i)
        field_no = key >> 3
        wt = key & 0x07
        if wt == 2:  # length-delimited
            ln, i2 = _read_varint(raw, i)
            i = i2
            data = raw[i : i + ln]
            i += ln
            if field_no == 1:  # uuid string
                try:
                    uuid = data.decode("utf-8")
                except Exception:
                    # Non-UTF8 payload in the uuid slot: treat as absent.
                    uuid = None
            elif field_no == 3:  # google.protobuf.Timestamp
                seconds, nanos = _decode_timestamp(data)
        elif wt == 0:  # varint -> not expected, skip
            _, i = _read_varint(raw, i)
        elif wt == 1:  # 64-bit fixed: skip
            i += 8
        elif wt == 5:  # 32-bit fixed: skip
            i += 4
        else:
            # Unknown wire type: stop parsing.
            break

    # Only include keys that were actually present in the message.
    out: Dict[str, Any] = {}
    if uuid is not None:
        out["uuid"] = uuid
    if seconds is not None:
        out["seconds"] = seconds
    if nanos is not None:
        out["nanos"] = nanos
    return out
|
| 475 |
+
|
| 476 |
+
|
| 477 |
+
def encode_server_message_data(
    uuid: Optional[str] = None,
    seconds: Optional[int] = None,
    nanos: Optional[int] = None,
) -> str:
    """将 uuid/seconds/nanos 组合编码为 Base64URL 字符串。

    Encode uuid/seconds/nanos into a Base64URL server_message_data string
    (inverse of decode_server_message_data).  Omitted arguments produce no
    wire field; the Timestamp (field 3) is written if either seconds or
    nanos is given.
    """
    parts = bytearray()
    if uuid:
        b = uuid.encode("utf-8")
        parts += _make_key(1, 2)  # field 1, length-delimited
        parts += _write_varint(len(b))
        parts += b

    if seconds is not None or nanos is not None:
        ts = _encode_timestamp(seconds, nanos)
        parts += _make_key(3, 2)  # field 3, length-delimited
        parts += _write_varint(len(ts))
        parts += ts

    return _b64url_encode_nopad(bytes(parts))
|
| 497 |
+
|
| 498 |
+
|
| 499 |
+
async def startup_tasks():
    """Tasks executed at application startup.

    Initializes the protobuf runtime (fatal on failure), checks/acquires a
    JWT token (best-effort), and logs the available API endpoints.

    NOTE(review): this coroutine is awaited by the app's lifespan handler
    and is also called directly from start.py, so it may run twice in that
    launch path — confirm that is intentional.
    """
    logger.info("=" * 60)
    logger.info("Warp Protobuf编解码服务器启动")
    logger.info("=" * 60)

    # Check the protobuf runtime; failure here is fatal for the server.
    try:
        from warp2protobuf.core.protobuf import ensure_proto_runtime

        ensure_proto_runtime()
        logger.info("✅ Protobuf运行时初始化成功")
    except Exception as e:
        logger.error(f"❌ Protobuf运行时初始化失败: {e}")
        raise

    # Check the JWT token; if absent, try to acquire an anonymous one.
    try:
        from warp2protobuf.core.auth import get_jwt_token, is_token_expired

        token = get_jwt_token()
        if token and not is_token_expired(token):
            logger.info("✅ JWT token有效")
        elif not token:
            logger.warning("⚠️ 未找到JWT token,尝试申请匿名访问token用于额度初始化…")
            try:
                new_token = await acquire_anonymous_access_token()
                if new_token:
                    logger.info("✅ 匿名访问token申请成功")
                else:
                    logger.warning("⚠️ 匿名访问token申请失败")
            except Exception as e2:
                logger.warning(f"⚠️ 匿名访问token申请异常: {e2}")
        else:
            logger.warning("⚠️ JWT token无效或已过期,建议运行: uv run refresh_jwt.py")
    except Exception as e:
        # Auth problems are non-fatal at startup; only log them.
        logger.warning(f"⚠️ JWT检查失败: {e}")

    # For the OpenAI compatibility layer, run src/openai_compat_server.py separately.

    # Log the available endpoints.
    logger.info("-" * 40)
    logger.info("可用的API端点:")
    logger.info("  GET  /          - 服务信息")
    logger.info("  GET  /healthz   - 健康检查")
    logger.info("  GET  /gui       - Web GUI界面")
    logger.info("  POST /api/encode - JSON -> Protobuf编码")
    logger.info("  POST /api/decode - Protobuf -> JSON解码")
    logger.info("  POST /api/stream-decode - 流式protobuf解码")
    logger.info("  POST /api/warp/send - JSON -> Protobuf -> Warp API转发")
    logger.info(
        "  POST /api/warp/send_stream - JSON -> Protobuf -> Warp API转发(返回解析事件)"
    )
    logger.info(
        "  POST /api/warp/send_stream_sse - JSON -> Protobuf -> Warp API转发(实时SSE,事件已解析)"
    )
    logger.info("  POST /api/warp/graphql/* - GraphQL请求转发到Warp API(带鉴权)")
    logger.info("  GET  /api/schemas - Protobuf schema信息")
    logger.info("  GET  /api/auth/status - JWT认证状态")
    logger.info("  POST /api/auth/refresh - 刷新JWT token")
    logger.info("  GET  /api/auth/user_id - 获取当前用户ID")
    logger.info("  GET  /api/packets/history - 数据包历史记录")
    logger.info("  WS   /ws        - WebSocket实时监控")
    logger.info("-" * 40)
    logger.info("测试命令:")
    logger.info("  uv run main.py --test basic  - 运行基础测试")
    logger.info("  uv run main.py --list        - 查看所有测试场景")
    logger.info("=" * 60)
|
| 567 |
+
|
| 568 |
+
|
| 569 |
+
def main():
    """Entry point: build the FastAPI app and serve it with uvicorn."""
    application = create_app()

    # Serve until interrupted; re-raise anything other than Ctrl-C.
    try:
        uvicorn.run(application, host="0.0.0.0", port=8000, log_level="info", access_log=True)
    except KeyboardInterrupt:
        logger.info("服务器被用户停止")
    except Exception as e:
        logger.error(f"服务器启动失败: {e}")
        raise
|
| 582 |
+
|
| 583 |
+
|
| 584 |
+
if __name__ == "__main__":
|
| 585 |
+
main()
|
start.py
ADDED
|
@@ -0,0 +1,38 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
import asyncio
|
| 3 |
+
import os
|
| 4 |
+
import threading
|
| 5 |
+
from protobuf2openai.app import app as openai_server # FastAPI app
|
| 6 |
+
from server import create_app, startup_tasks
|
| 7 |
+
import uvicorn
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
async def main():
    """Launch the warp server in a background thread, then refresh the JWT.

    NOTE(review): startup_tasks() is awaited here AND is also run by the
    app's lifespan handler once uvicorn starts warp_app below, so it
    appears to execute twice — confirm this double initialization is
    intentional.
    """
    # Start the warp server in a background thread
    warp_app = create_app()
    await startup_tasks()

    # Daemon thread hosting the warp server; it dies with the main process.
    warp_thread = threading.Thread(
        target=uvicorn.run,
        args=(warp_app,),
        kwargs={"host": "0.0.0.0", "port": 8000, "log_level": "info", "access_log": True},
        daemon=True
    )
    warp_thread.start()

    # Best-effort JWT refresh; any failure is silently ignored.
    try:
        from warp2protobuf.core.auth import refresh_jwt_if_needed as _refresh_jwt
        await _refresh_jwt()
    except Exception:
        pass
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
if __name__ == "__main__":
    # First bring up the warp backend (daemon thread) and refresh the JWT.
    asyncio.run(main())
    # Then serve the OpenAI-compatible API in the foreground; the daemon
    # warp thread keeps running for the lifetime of this process.
    uvicorn.run(
        openai_server,
        host=os.getenv("HOST", "127.0.0.1"),
        port=int(os.getenv("PORT", "8010")),
        log_level="info",
    )
|
uv.lock
ADDED
|
@@ -0,0 +1,863 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version = 1
|
| 2 |
+
revision = 1
|
| 3 |
+
requires-python = ">=3.13"
|
| 4 |
+
|
| 5 |
+
[[package]]
|
| 6 |
+
name = "annotated-types"
|
| 7 |
+
version = "0.7.0"
|
| 8 |
+
source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" }
|
| 9 |
+
sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081 }
|
| 10 |
+
wheels = [
|
| 11 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643 },
|
| 12 |
+
]
|
| 13 |
+
|
| 14 |
+
[[package]]
|
| 15 |
+
name = "anyio"
|
| 16 |
+
version = "4.10.0"
|
| 17 |
+
source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" }
|
| 18 |
+
dependencies = [
|
| 19 |
+
{ name = "idna" },
|
| 20 |
+
{ name = "sniffio" },
|
| 21 |
+
]
|
| 22 |
+
sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/f1/b4/636b3b65173d3ce9a38ef5f0522789614e590dab6a8d505340a4efe4c567/anyio-4.10.0.tar.gz", hash = "sha256:3f3fae35c96039744587aa5b8371e7e8e603c0702999535961dd336026973ba6", size = 213252 }
|
| 23 |
+
wheels = [
|
| 24 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/6f/12/e5e0282d673bb9746bacfb6e2dba8719989d3660cdb2ea79aee9a9651afb/anyio-4.10.0-py3-none-any.whl", hash = "sha256:60e474ac86736bbfd6f210f7a61218939c318f43f9972497381f1c5e930ed3d1", size = 107213 },
|
| 25 |
+
]
|
| 26 |
+
|
| 27 |
+
[[package]]
|
| 28 |
+
name = "certifi"
|
| 29 |
+
version = "2025.8.3"
|
| 30 |
+
source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" }
|
| 31 |
+
sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/dc/67/960ebe6bf230a96cda2e0abcf73af550ec4f090005363542f0765df162e0/certifi-2025.8.3.tar.gz", hash = "sha256:e564105f78ded564e3ae7c923924435e1daa7463faeab5bb932bc53ffae63407", size = 162386 }
|
| 32 |
+
wheels = [
|
| 33 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/e5/48/1549795ba7742c948d2ad169c1c8cdbae65bc450d6cd753d124b17c8cd32/certifi-2025.8.3-py3-none-any.whl", hash = "sha256:f6c12493cfb1b06ba2ff328595af9350c65d6644968e5d3a2ffd78699af217a5", size = 161216 },
|
| 34 |
+
]
|
| 35 |
+
|
| 36 |
+
[[package]]
|
| 37 |
+
name = "charset-normalizer"
|
| 38 |
+
version = "3.4.3"
|
| 39 |
+
source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" }
|
| 40 |
+
sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/83/2d/5fd176ceb9b2fc619e63405525573493ca23441330fcdaee6bef9460e924/charset_normalizer-3.4.3.tar.gz", hash = "sha256:6fce4b8500244f6fcb71465d4a4930d132ba9ab8e71a7859e6a5d59851068d14", size = 122371 }
|
| 41 |
+
wheels = [
|
| 42 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/65/ca/2135ac97709b400c7654b4b764daf5c5567c2da45a30cdd20f9eefe2d658/charset_normalizer-3.4.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:14c2a87c65b351109f6abfc424cab3927b3bdece6f706e4d12faaf3d52ee5efe", size = 205326 },
|
| 43 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/71/11/98a04c3c97dd34e49c7d247083af03645ca3730809a5509443f3c37f7c99/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:41d1fc408ff5fdfb910200ec0e74abc40387bccb3252f3f27c0676731df2b2c8", size = 146008 },
|
| 44 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/60/f5/4659a4cb3c4ec146bec80c32d8bb16033752574c20b1252ee842a95d1a1e/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:1bb60174149316da1c35fa5233681f7c0f9f514509b8e399ab70fea5f17e45c9", size = 159196 },
|
| 45 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/86/9e/f552f7a00611f168b9a5865a1414179b2c6de8235a4fa40189f6f79a1753/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:30d006f98569de3459c2fc1f2acde170b7b2bd265dc1943e87e1a4efe1b67c31", size = 156819 },
|
| 46 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/7e/95/42aa2156235cbc8fa61208aded06ef46111c4d3f0de233107b3f38631803/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:416175faf02e4b0810f1f38bcb54682878a4af94059a1cd63b8747244420801f", size = 151350 },
|
| 47 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/c2/a9/3865b02c56f300a6f94fc631ef54f0a8a29da74fb45a773dfd3dcd380af7/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6aab0f181c486f973bc7262a97f5aca3ee7e1437011ef0c2ec04b5a11d16c927", size = 148644 },
|
| 48 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/77/d9/cbcf1a2a5c7d7856f11e7ac2d782aec12bdfea60d104e60e0aa1c97849dc/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:fdabf8315679312cfa71302f9bd509ded4f2f263fb5b765cf1433b39106c3cc9", size = 160468 },
|
| 49 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/f6/42/6f45efee8697b89fda4d50580f292b8f7f9306cb2971d4b53f8914e4d890/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:bd28b817ea8c70215401f657edef3a8aa83c29d447fb0b622c35403780ba11d5", size = 158187 },
|
| 50 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/70/99/f1c3bdcfaa9c45b3ce96f70b14f070411366fa19549c1d4832c935d8e2c3/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:18343b2d246dc6761a249ba1fb13f9ee9a2bcd95decc767319506056ea4ad4dc", size = 152699 },
|
| 51 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/a3/ad/b0081f2f99a4b194bcbb1934ef3b12aa4d9702ced80a37026b7607c72e58/charset_normalizer-3.4.3-cp313-cp313-win32.whl", hash = "sha256:6fb70de56f1859a3f71261cbe41005f56a7842cc348d3aeb26237560bfa5e0ce", size = 99580 },
|
| 52 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/9a/8f/ae790790c7b64f925e5c953b924aaa42a243fb778fed9e41f147b2a5715a/charset_normalizer-3.4.3-cp313-cp313-win_amd64.whl", hash = "sha256:cf1ebb7d78e1ad8ec2a8c4732c7be2e736f6e5123a4146c5b89c9d1f585f8cef", size = 107366 },
|
| 53 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/8e/91/b5a06ad970ddc7a0e513112d40113e834638f4ca1120eb727a249fb2715e/charset_normalizer-3.4.3-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3cd35b7e8aedeb9e34c41385fda4f73ba609e561faedfae0a9e75e44ac558a15", size = 204342 },
|
| 54 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/ce/ec/1edc30a377f0a02689342f214455c3f6c2fbedd896a1d2f856c002fc3062/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b89bc04de1d83006373429975f8ef9e7932534b8cc9ca582e4db7d20d91816db", size = 145995 },
|
| 55 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/17/e5/5e67ab85e6d22b04641acb5399c8684f4d37caf7558a53859f0283a650e9/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2001a39612b241dae17b4687898843f254f8748b796a2e16f1051a17078d991d", size = 158640 },
|
| 56 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/f1/e5/38421987f6c697ee3722981289d554957c4be652f963d71c5e46a262e135/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8dcfc373f888e4fb39a7bc57e93e3b845e7f462dacc008d9749568b1c4ece096", size = 156636 },
|
| 57 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/a0/e4/5a075de8daa3ec0745a9a3b54467e0c2967daaaf2cec04c845f73493e9a1/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:18b97b8404387b96cdbd30ad660f6407799126d26a39ca65729162fd810a99aa", size = 150939 },
|
| 58 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/02/f7/3611b32318b30974131db62b4043f335861d4d9b49adc6d57c1149cc49d4/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ccf600859c183d70eb47e05a44cd80a4ce77394d1ac0f79dbd2dd90a69a3a049", size = 148580 },
|
| 59 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/7e/61/19b36f4bd67f2793ab6a99b979b4e4f3d8fc754cbdffb805335df4337126/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:53cd68b185d98dde4ad8990e56a58dea83a4162161b1ea9272e5c9182ce415e0", size = 159870 },
|
| 60 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/06/57/84722eefdd338c04cf3030ada66889298eaedf3e7a30a624201e0cbe424a/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:30a96e1e1f865f78b030d65241c1ee850cdf422d869e9028e2fc1d5e4db73b92", size = 157797 },
|
| 61 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/72/2a/aff5dd112b2f14bcc3462c312dce5445806bfc8ab3a7328555da95330e4b/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d716a916938e03231e86e43782ca7878fb602a125a91e7acb8b5112e2e96ac16", size = 152224 },
|
| 62 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/b7/8c/9839225320046ed279c6e839d51f028342eb77c91c89b8ef2549f951f3ec/charset_normalizer-3.4.3-cp314-cp314-win32.whl", hash = "sha256:c6dbd0ccdda3a2ba7c2ecd9d77b37f3b5831687d8dc1b6ca5f56a4880cc7b7ce", size = 100086 },
|
| 63 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/ee/7a/36fbcf646e41f710ce0a563c1c9a343c6edf9be80786edeb15b6f62e17db/charset_normalizer-3.4.3-cp314-cp314-win_amd64.whl", hash = "sha256:73dc19b562516fc9bcf6e5d6e596df0b4eb98d87e4f79f3ae71840e6ed21361c", size = 107400 },
|
| 64 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/8a/1f/f041989e93b001bc4e44bb1669ccdcf54d3f00e628229a85b08d330615c5/charset_normalizer-3.4.3-py3-none-any.whl", hash = "sha256:ce571ab16d890d23b5c278547ba694193a45011ff86a9162a71307ed9f86759a", size = 53175 },
|
| 65 |
+
]
|
| 66 |
+
|
| 67 |
+
[[package]]
|
| 68 |
+
name = "click"
|
| 69 |
+
version = "8.2.1"
|
| 70 |
+
source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" }
|
| 71 |
+
dependencies = [
|
| 72 |
+
{ name = "colorama", marker = "sys_platform == 'win32'" },
|
| 73 |
+
]
|
| 74 |
+
sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/60/6c/8ca2efa64cf75a977a0d7fac081354553ebe483345c734fb6b6515d96bbc/click-8.2.1.tar.gz", hash = "sha256:27c491cc05d968d271d5a1db13e3b5a184636d9d930f148c50b038f0d0646202", size = 286342 }
|
| 75 |
+
wheels = [
|
| 76 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/85/32/10bb5764d90a8eee674e9dc6f4db6a0ab47c8c4d0d83c27f7c39ac415a4d/click-8.2.1-py3-none-any.whl", hash = "sha256:61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b", size = 102215 },
|
| 77 |
+
]
|
| 78 |
+
|
| 79 |
+
[[package]]
|
| 80 |
+
name = "colorama"
|
| 81 |
+
version = "0.4.6"
|
| 82 |
+
source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" }
|
| 83 |
+
sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697 }
|
| 84 |
+
wheels = [
|
| 85 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335 },
|
| 86 |
+
]
|
| 87 |
+
|
| 88 |
+
[[package]]
|
| 89 |
+
name = "distro"
|
| 90 |
+
version = "1.9.0"
|
| 91 |
+
source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" }
|
| 92 |
+
sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/fc/f8/98eea607f65de6527f8a2e8885fc8015d3e6f5775df186e443e0964a11c3/distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed", size = 60722 }
|
| 93 |
+
wheels = [
|
| 94 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/12/b3/231ffd4ab1fc9d679809f356cebee130ac7daa00d6d6f3206dd4fd137e9e/distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2", size = 20277 },
|
| 95 |
+
]
|
| 96 |
+
|
| 97 |
+
[[package]]
|
| 98 |
+
name = "dnspython"
|
| 99 |
+
version = "2.7.0"
|
| 100 |
+
source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" }
|
| 101 |
+
sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/b5/4a/263763cb2ba3816dd94b08ad3a33d5fdae34ecb856678773cc40a3605829/dnspython-2.7.0.tar.gz", hash = "sha256:ce9c432eda0dc91cf618a5cedf1a4e142651196bbcd2c80e89ed5a907e5cfaf1", size = 345197 }
|
| 102 |
+
wheels = [
|
| 103 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/68/1b/e0a87d256e40e8c888847551b20a017a6b98139178505dc7ffb96f04e954/dnspython-2.7.0-py3-none-any.whl", hash = "sha256:b4c34b7d10b51bcc3a5071e7b8dee77939f1e878477eeecc965e9835f63c6c86", size = 313632 },
|
| 104 |
+
]
|
| 105 |
+
|
| 106 |
+
[[package]]
|
| 107 |
+
name = "email-validator"
|
| 108 |
+
version = "2.2.0"
|
| 109 |
+
source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" }
|
| 110 |
+
dependencies = [
|
| 111 |
+
{ name = "dnspython" },
|
| 112 |
+
{ name = "idna" },
|
| 113 |
+
]
|
| 114 |
+
sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/48/ce/13508a1ec3f8bb981ae4ca79ea40384becc868bfae97fd1c942bb3a001b1/email_validator-2.2.0.tar.gz", hash = "sha256:cb690f344c617a714f22e66ae771445a1ceb46821152df8e165c5f9a364582b7", size = 48967 }
|
| 115 |
+
wheels = [
|
| 116 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/d7/ee/bf0adb559ad3c786f12bcbc9296b3f5675f529199bef03e2df281fa1fadb/email_validator-2.2.0-py3-none-any.whl", hash = "sha256:561977c2d73ce3611850a06fa56b414621e0c8faa9d66f2611407d87465da631", size = 33521 },
|
| 117 |
+
]
|
| 118 |
+
|
| 119 |
+
[[package]]
|
| 120 |
+
name = "fastapi"
|
| 121 |
+
version = "0.116.1"
|
| 122 |
+
source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" }
|
| 123 |
+
dependencies = [
|
| 124 |
+
{ name = "pydantic" },
|
| 125 |
+
{ name = "starlette" },
|
| 126 |
+
{ name = "typing-extensions" },
|
| 127 |
+
]
|
| 128 |
+
sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/78/d7/6c8b3bfe33eeffa208183ec037fee0cce9f7f024089ab1c5d12ef04bd27c/fastapi-0.116.1.tar.gz", hash = "sha256:ed52cbf946abfd70c5a0dccb24673f0670deeb517a88b3544d03c2a6bf283143", size = 296485 }
|
| 129 |
+
wheels = [
|
| 130 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/e5/47/d63c60f59a59467fda0f93f46335c9d18526d7071f025cb5b89d5353ea42/fastapi-0.116.1-py3-none-any.whl", hash = "sha256:c46ac7c312df840f0c9e220f7964bada936781bc4e2e6eb71f1c4d7553786565", size = 95631 },
|
| 131 |
+
]
|
| 132 |
+
|
| 133 |
+
[package.optional-dependencies]
|
| 134 |
+
standard = [
|
| 135 |
+
{ name = "email-validator" },
|
| 136 |
+
{ name = "fastapi-cli", extra = ["standard"] },
|
| 137 |
+
{ name = "httpx" },
|
| 138 |
+
{ name = "jinja2" },
|
| 139 |
+
{ name = "python-multipart" },
|
| 140 |
+
{ name = "uvicorn", extra = ["standard"] },
|
| 141 |
+
]
|
| 142 |
+
|
| 143 |
+
[[package]]
|
| 144 |
+
name = "fastapi-cli"
|
| 145 |
+
version = "0.0.8"
|
| 146 |
+
source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" }
|
| 147 |
+
dependencies = [
|
| 148 |
+
{ name = "rich-toolkit" },
|
| 149 |
+
{ name = "typer" },
|
| 150 |
+
{ name = "uvicorn", extra = ["standard"] },
|
| 151 |
+
]
|
| 152 |
+
sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/c6/94/3ef75d9c7c32936ecb539b9750ccbdc3d2568efd73b1cb913278375f4533/fastapi_cli-0.0.8.tar.gz", hash = "sha256:2360f2989b1ab4a3d7fc8b3a0b20e8288680d8af2e31de7c38309934d7f8a0ee", size = 16884 }
|
| 153 |
+
wheels = [
|
| 154 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/e0/3f/6ad3103c5f59208baf4c798526daea6a74085bb35d1c161c501863470476/fastapi_cli-0.0.8-py3-none-any.whl", hash = "sha256:0ea95d882c85b9219a75a65ab27e8da17dac02873e456850fa0a726e96e985eb", size = 10770 },
|
| 155 |
+
]
|
| 156 |
+
|
| 157 |
+
[package.optional-dependencies]
|
| 158 |
+
standard = [
|
| 159 |
+
{ name = "fastapi-cloud-cli" },
|
| 160 |
+
{ name = "uvicorn", extra = ["standard"] },
|
| 161 |
+
]
|
| 162 |
+
|
| 163 |
+
[[package]]
|
| 164 |
+
name = "fastapi-cloud-cli"
|
| 165 |
+
version = "0.1.5"
|
| 166 |
+
source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" }
|
| 167 |
+
dependencies = [
|
| 168 |
+
{ name = "httpx" },
|
| 169 |
+
{ name = "pydantic", extra = ["email"] },
|
| 170 |
+
{ name = "rich-toolkit" },
|
| 171 |
+
{ name = "rignore" },
|
| 172 |
+
{ name = "sentry-sdk" },
|
| 173 |
+
{ name = "typer" },
|
| 174 |
+
{ name = "uvicorn", extra = ["standard"] },
|
| 175 |
+
]
|
| 176 |
+
sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/a9/2e/3b6e5016affc310e5109bc580f760586eabecea0c8a7ab067611cd849ac0/fastapi_cloud_cli-0.1.5.tar.gz", hash = "sha256:341ee585eb731a6d3c3656cb91ad38e5f39809bf1a16d41de1333e38635a7937", size = 22710 }
|
| 177 |
+
wheels = [
|
| 178 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/e5/a6/5aa862489a2918a096166fd98d9fe86b7fd53c607678b3fa9d8c432d88d5/fastapi_cloud_cli-0.1.5-py3-none-any.whl", hash = "sha256:d80525fb9c0e8af122370891f9fa83cf5d496e4ad47a8dd26c0496a6c85a012a", size = 18992 },
|
| 179 |
+
]
|
| 180 |
+
|
| 181 |
+
[[package]]
|
| 182 |
+
name = "grpcio"
|
| 183 |
+
version = "1.74.0"
|
| 184 |
+
source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" }
|
| 185 |
+
sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/38/b4/35feb8f7cab7239c5b94bd2db71abb3d6adb5f335ad8f131abb6060840b6/grpcio-1.74.0.tar.gz", hash = "sha256:80d1f4fbb35b0742d3e3d3bb654b7381cd5f015f8497279a1e9c21ba623e01b1", size = 12756048 }
|
| 186 |
+
wheels = [
|
| 187 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/d4/d8/1004a5f468715221450e66b051c839c2ce9a985aa3ee427422061fcbb6aa/grpcio-1.74.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:2bc2d7d8d184e2362b53905cb1708c84cb16354771c04b490485fa07ce3a1d89", size = 5449488 },
|
| 188 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/94/0e/33731a03f63740d7743dced423846c831d8e6da808fcd02821a4416df7fa/grpcio-1.74.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:c14e803037e572c177ba54a3e090d6eb12efd795d49327c5ee2b3bddb836bf01", size = 10974059 },
|
| 189 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/0d/c6/3d2c14d87771a421205bdca991467cfe473ee4c6a1231c1ede5248c62ab8/grpcio-1.74.0-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:f6ec94f0e50eb8fa1744a731088b966427575e40c2944a980049798b127a687e", size = 5945647 },
|
| 190 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/c5/83/5a354c8aaff58594eef7fffebae41a0f8995a6258bbc6809b800c33d4c13/grpcio-1.74.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:566b9395b90cc3d0d0c6404bc8572c7c18786ede549cdb540ae27b58afe0fb91", size = 6626101 },
|
| 191 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/3f/ca/4fdc7bf59bf6994aa45cbd4ef1055cd65e2884de6113dbd49f75498ddb08/grpcio-1.74.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1ea6176d7dfd5b941ea01c2ec34de9531ba494d541fe2057c904e601879f249", size = 6182562 },
|
| 192 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/fd/48/2869e5b2c1922583686f7ae674937986807c2f676d08be70d0a541316270/grpcio-1.74.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:64229c1e9cea079420527fa8ac45d80fc1e8d3f94deaa35643c381fa8d98f362", size = 6303425 },
|
| 193 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/a6/0e/bac93147b9a164f759497bc6913e74af1cb632c733c7af62c0336782bd38/grpcio-1.74.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:0f87bddd6e27fc776aacf7ebfec367b6d49cad0455123951e4488ea99d9b9b8f", size = 6996533 },
|
| 194 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/84/35/9f6b2503c1fd86d068b46818bbd7329db26a87cdd8c01e0d1a9abea1104c/grpcio-1.74.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:3b03d8f2a07f0fea8c8f74deb59f8352b770e3900d143b3d1475effcb08eec20", size = 6491489 },
|
| 195 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/75/33/a04e99be2a82c4cbc4039eb3a76f6c3632932b9d5d295221389d10ac9ca7/grpcio-1.74.0-cp313-cp313-win32.whl", hash = "sha256:b6a73b2ba83e663b2480a90b82fdae6a7aa6427f62bf43b29912c0cfd1aa2bfa", size = 3805811 },
|
| 196 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/34/80/de3eb55eb581815342d097214bed4c59e806b05f1b3110df03b2280d6dfd/grpcio-1.74.0-cp313-cp313-win_amd64.whl", hash = "sha256:fd3c71aeee838299c5887230b8a1822795325ddfea635edd82954c1eaa831e24", size = 4489214 },
|
| 197 |
+
]
|
| 198 |
+
|
| 199 |
+
[[package]]
|
| 200 |
+
name = "grpcio-tools"
|
| 201 |
+
version = "1.74.0"
|
| 202 |
+
source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" }
|
| 203 |
+
dependencies = [
|
| 204 |
+
{ name = "grpcio" },
|
| 205 |
+
{ name = "protobuf" },
|
| 206 |
+
{ name = "setuptools" },
|
| 207 |
+
]
|
| 208 |
+
sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/90/c8/bca79cb8c14bb63027831039919c801db9f593c7504c09433934f5dff6a4/grpcio_tools-1.74.0.tar.gz", hash = "sha256:88ab9eb18b6ac1b4872add6b394073bd8d44eee7c32e4dc60a022e25ffaffb95", size = 5390007 }
|
| 209 |
+
wheels = [
|
| 210 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/2f/65/307a72cf4bfa553a25e284bd1f27b94a53816ac01ddf432c398117b91b2a/grpcio_tools-1.74.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:e2e22460355adbd0f25fdd7ed8b9ae53afb3875b9d5f34cdf1cf12559418245e", size = 2545750 },
|
| 211 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/5b/8e/9b2217c15baadc7cfca3eba9f980e147452ca82f41767490f619edea3489/grpcio_tools-1.74.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:0cab5a2c6ae75b555fee8a1a9a9b575205171e1de392fe2d4139a29e67d8f5bb", size = 5838169 },
|
| 212 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/ea/42/a6a158b7e91c0a358cddf3f9088b004c2bfa42d1f96154b9b8eb17e16d73/grpcio_tools-1.74.0-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:9b18afca48b55832402a716ea4634ef2b68927a8a17ddf4038f51812299255c9", size = 2517140 },
|
| 213 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/05/db/d4576a07b2d1211822a070f76a99a9f4f4cb63496a02964ce77c88df8a28/grpcio_tools-1.74.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85f442a9e89e276bf89a0c9c76ea71647a927d967759333c1fa40300c27f7bd", size = 2905214 },
|
| 214 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/77/dc/3713e75751f862d8c84f823ba935d486c0aac0b6f789fa61fbde04ad5019/grpcio_tools-1.74.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:051ce925b0b99ae2daf61b3cba19962b8655cc2a72758ce4081b89272206f5a3", size = 2656245 },
|
| 215 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/bd/e4/01f9e8e0401d8e11a70ae8aff6899eb8c16536f69a0a9ffb25873588721c/grpcio_tools-1.74.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:98c7b8eb0de6984cd7fa7335ce3383b3bb9a1559edc238c811df88008d5d3593", size = 3052327 },
|
| 216 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/28/c2/264b4e705375a834c9c7462847ae435c0be1644f03a705d3d7464af07bd5/grpcio_tools-1.74.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:f8f7d17b7573b9a2a6b4183fa4a56a2ab17370c8d0541e1424cf0c9c6f863434", size = 3500706 },
|
| 217 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/ee/c0/cc034cec5871a1918e7888e8ce700e06fab5bbb328f998a2f2750cd603b5/grpcio_tools-1.74.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:db08b91ea0cd66dc4b1b929100e7aa84c9c10c51573c8282ec1ba05b41f887ef", size = 3125098 },
|
| 218 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/69/55/5792b681af82b3ff1e50ce0ccfbb6d52fc68a13932ed3da57e58d7dfb67b/grpcio_tools-1.74.0-cp313-cp313-win32.whl", hash = "sha256:4b6c5efb331ae9e5f614437f4a5938459a8a5a1ab3dfe133d2bbdeaba39b894d", size = 992431 },
|
| 219 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/94/9f/626f0fe6bfc1c6917785c6a5ee2eb8c07b5a30771e4bf4cff3c1ab5b431b/grpcio_tools-1.74.0-cp313-cp313-win_amd64.whl", hash = "sha256:b8324cd67f61f7900d227b36913ee5f0302ba3ba8777c8bc705afa8174098d28", size = 1157064 },
|
| 220 |
+
]
|
| 221 |
+
|
| 222 |
+
[[package]]
|
| 223 |
+
name = "h11"
|
| 224 |
+
version = "0.16.0"
|
| 225 |
+
source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" }
|
| 226 |
+
sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250 }
|
| 227 |
+
wheels = [
|
| 228 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515 },
|
| 229 |
+
]
|
| 230 |
+
|
| 231 |
+
[[package]]
|
| 232 |
+
name = "h2"
|
| 233 |
+
version = "4.2.0"
|
| 234 |
+
source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" }
|
| 235 |
+
dependencies = [
|
| 236 |
+
{ name = "hpack" },
|
| 237 |
+
{ name = "hyperframe" },
|
| 238 |
+
]
|
| 239 |
+
sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/1b/38/d7f80fd13e6582fb8e0df8c9a653dcc02b03ca34f4d72f34869298c5baf8/h2-4.2.0.tar.gz", hash = "sha256:c8a52129695e88b1a0578d8d2cc6842bbd79128ac685463b887ee278126ad01f", size = 2150682 }
|
| 240 |
+
wheels = [
|
| 241 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/d0/9e/984486f2d0a0bd2b024bf4bc1c62688fcafa9e61991f041fb0e2def4a982/h2-4.2.0-py3-none-any.whl", hash = "sha256:479a53ad425bb29af087f3458a61d30780bc818e4ebcf01f0b536ba916462ed0", size = 60957 },
|
| 242 |
+
]
|
| 243 |
+
|
| 244 |
+
[[package]]
|
| 245 |
+
name = "hpack"
|
| 246 |
+
version = "4.1.0"
|
| 247 |
+
source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" }
|
| 248 |
+
sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/2c/48/71de9ed269fdae9c8057e5a4c0aa7402e8bb16f2c6e90b3aa53327b113f8/hpack-4.1.0.tar.gz", hash = "sha256:ec5eca154f7056aa06f196a557655c5b009b382873ac8d1e66e79e87535f1dca", size = 51276 }
|
| 249 |
+
wheels = [
|
| 250 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/07/c6/80c95b1b2b94682a72cbdbfb85b81ae2daffa4291fbfa1b1464502ede10d/hpack-4.1.0-py3-none-any.whl", hash = "sha256:157ac792668d995c657d93111f46b4535ed114f0c9c8d672271bbec7eae1b496", size = 34357 },
|
| 251 |
+
]
|
| 252 |
+
|
| 253 |
+
[[package]]
|
| 254 |
+
name = "httpcore"
|
| 255 |
+
version = "1.0.9"
|
| 256 |
+
source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" }
|
| 257 |
+
dependencies = [
|
| 258 |
+
{ name = "certifi" },
|
| 259 |
+
{ name = "h11" },
|
| 260 |
+
]
|
| 261 |
+
sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484 }
|
| 262 |
+
wheels = [
|
| 263 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784 },
|
| 264 |
+
]
|
| 265 |
+
|
| 266 |
+
[[package]]
|
| 267 |
+
name = "httptools"
|
| 268 |
+
version = "0.6.4"
|
| 269 |
+
source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" }
|
| 270 |
+
sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/a7/9a/ce5e1f7e131522e6d3426e8e7a490b3a01f39a6696602e1c4f33f9e94277/httptools-0.6.4.tar.gz", hash = "sha256:4e93eee4add6493b59a5c514da98c939b244fce4a0d8879cd3f466562f4b7d5c", size = 240639 }
|
| 271 |
+
wheels = [
|
| 272 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/94/a3/9fe9ad23fd35f7de6b91eeb60848986058bd8b5a5c1e256f5860a160cc3e/httptools-0.6.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ade273d7e767d5fae13fa637f4d53b6e961fb7fd93c7797562663f0171c26660", size = 197214 },
|
| 273 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/ea/d9/82d5e68bab783b632023f2fa31db20bebb4e89dfc4d2293945fd68484ee4/httptools-0.6.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:856f4bc0478ae143bad54a4242fccb1f3f86a6e1be5548fecfd4102061b3a083", size = 102431 },
|
| 274 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/96/c1/cb499655cbdbfb57b577734fde02f6fa0bbc3fe9fb4d87b742b512908dff/httptools-0.6.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:322d20ea9cdd1fa98bd6a74b77e2ec5b818abdc3d36695ab402a0de8ef2865a3", size = 473121 },
|
| 275 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/af/71/ee32fd358f8a3bb199b03261f10921716990808a675d8160b5383487a317/httptools-0.6.4-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d87b29bd4486c0093fc64dea80231f7c7f7eb4dc70ae394d70a495ab8436071", size = 473805 },
|
| 276 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/8a/0a/0d4df132bfca1507114198b766f1737d57580c9ad1cf93c1ff673e3387be/httptools-0.6.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:342dd6946aa6bda4b8f18c734576106b8a31f2fe31492881a9a160ec84ff4bd5", size = 448858 },
|
| 277 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/1e/6a/787004fdef2cabea27bad1073bf6a33f2437b4dbd3b6fb4a9d71172b1c7c/httptools-0.6.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4b36913ba52008249223042dca46e69967985fb4051951f94357ea681e1f5dc0", size = 452042 },
|
| 278 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/4d/dc/7decab5c404d1d2cdc1bb330b1bf70e83d6af0396fd4fc76fc60c0d522bf/httptools-0.6.4-cp313-cp313-win_amd64.whl", hash = "sha256:28908df1b9bb8187393d5b5db91435ccc9c8e891657f9cbb42a2541b44c82fc8", size = 87682 },
|
| 279 |
+
]
|
| 280 |
+
|
| 281 |
+
[[package]]
|
| 282 |
+
name = "httpx"
|
| 283 |
+
version = "0.28.1"
|
| 284 |
+
source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" }
|
| 285 |
+
dependencies = [
|
| 286 |
+
{ name = "anyio" },
|
| 287 |
+
{ name = "certifi" },
|
| 288 |
+
{ name = "httpcore" },
|
| 289 |
+
{ name = "idna" },
|
| 290 |
+
]
|
| 291 |
+
sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406 }
|
| 292 |
+
wheels = [
|
| 293 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517 },
|
| 294 |
+
]
|
| 295 |
+
|
| 296 |
+
[package.optional-dependencies]
|
| 297 |
+
http2 = [
|
| 298 |
+
{ name = "h2" },
|
| 299 |
+
]
|
| 300 |
+
|
| 301 |
+
[[package]]
|
| 302 |
+
name = "hyperframe"
|
| 303 |
+
version = "6.1.0"
|
| 304 |
+
source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" }
|
| 305 |
+
sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/02/e7/94f8232d4a74cc99514c13a9f995811485a6903d48e5d952771ef6322e30/hyperframe-6.1.0.tar.gz", hash = "sha256:f630908a00854a7adeabd6382b43923a4c4cd4b821fcb527e6ab9e15382a3b08", size = 26566 }
|
| 306 |
+
wheels = [
|
| 307 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/48/30/47d0bf6072f7252e6521f3447ccfa40b421b6824517f82854703d0f5a98b/hyperframe-6.1.0-py3-none-any.whl", hash = "sha256:b03380493a519fce58ea5af42e4a42317bf9bd425596f7a0835ffce80f1a42e5", size = 13007 },
|
| 308 |
+
]
|
| 309 |
+
|
| 310 |
+
[[package]]
|
| 311 |
+
name = "idna"
|
| 312 |
+
version = "3.10"
|
| 313 |
+
source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" }
|
| 314 |
+
sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490 }
|
| 315 |
+
wheels = [
|
| 316 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442 },
|
| 317 |
+
]
|
| 318 |
+
|
| 319 |
+
[[package]]
|
| 320 |
+
name = "jinja2"
|
| 321 |
+
version = "3.1.6"
|
| 322 |
+
source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" }
|
| 323 |
+
dependencies = [
|
| 324 |
+
{ name = "markupsafe" },
|
| 325 |
+
]
|
| 326 |
+
sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/df/bf/f7da0350254c0ed7c72f3e33cef02e048281fec7ecec5f032d4aac52226b/jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d", size = 245115 }
|
| 327 |
+
wheels = [
|
| 328 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899 },
|
| 329 |
+
]
|
| 330 |
+
|
| 331 |
+
[[package]]
|
| 332 |
+
name = "jiter"
|
| 333 |
+
version = "0.10.0"
|
| 334 |
+
source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" }
|
| 335 |
+
sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/ee/9d/ae7ddb4b8ab3fb1b51faf4deb36cb48a4fbbd7cb36bad6a5fca4741306f7/jiter-0.10.0.tar.gz", hash = "sha256:07a7142c38aacc85194391108dc91b5b57093c978a9932bd86a36862759d9500", size = 162759 }
|
| 336 |
+
wheels = [
|
| 337 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/2e/b0/279597e7a270e8d22623fea6c5d4eeac328e7d95c236ed51a2b884c54f70/jiter-0.10.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:e0588107ec8e11b6f5ef0e0d656fb2803ac6cf94a96b2b9fc675c0e3ab5e8644", size = 311617 },
|
| 338 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/91/e3/0916334936f356d605f54cc164af4060e3e7094364add445a3bc79335d46/jiter-0.10.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cafc4628b616dc32530c20ee53d71589816cf385dd9449633e910d596b1f5c8a", size = 318947 },
|
| 339 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/6a/8e/fd94e8c02d0e94539b7d669a7ebbd2776e51f329bb2c84d4385e8063a2ad/jiter-0.10.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:520ef6d981172693786a49ff5b09eda72a42e539f14788124a07530f785c3ad6", size = 344618 },
|
| 340 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/6f/b0/f9f0a2ec42c6e9c2e61c327824687f1e2415b767e1089c1d9135f43816bd/jiter-0.10.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:554dedfd05937f8fc45d17ebdf298fe7e0c77458232bcb73d9fbbf4c6455f5b3", size = 368829 },
|
| 341 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/e8/57/5bbcd5331910595ad53b9fd0c610392ac68692176f05ae48d6ce5c852967/jiter-0.10.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5bc299da7789deacf95f64052d97f75c16d4fc8c4c214a22bf8d859a4288a1c2", size = 491034 },
|
| 342 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/9b/be/c393df00e6e6e9e623a73551774449f2f23b6ec6a502a3297aeeece2c65a/jiter-0.10.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5161e201172de298a8a1baad95eb85db4fb90e902353b1f6a41d64ea64644e25", size = 388529 },
|
| 343 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/42/3e/df2235c54d365434c7f150b986a6e35f41ebdc2f95acea3036d99613025d/jiter-0.10.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e2227db6ba93cb3e2bf67c87e594adde0609f146344e8207e8730364db27041", size = 350671 },
|
| 344 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/c6/77/71b0b24cbcc28f55ab4dbfe029f9a5b73aeadaba677843fc6dc9ed2b1d0a/jiter-0.10.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:15acb267ea5e2c64515574b06a8bf393fbfee6a50eb1673614aa45f4613c0cca", size = 390864 },
|
| 345 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/6a/d3/ef774b6969b9b6178e1d1e7a89a3bd37d241f3d3ec5f8deb37bbd203714a/jiter-0.10.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:901b92f2e2947dc6dfcb52fd624453862e16665ea909a08398dde19c0731b7f4", size = 522989 },
|
| 346 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/0c/41/9becdb1d8dd5d854142f45a9d71949ed7e87a8e312b0bede2de849388cb9/jiter-0.10.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:d0cb9a125d5a3ec971a094a845eadde2db0de85b33c9f13eb94a0c63d463879e", size = 513495 },
|
| 347 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/9c/36/3468e5a18238bdedae7c4d19461265b5e9b8e288d3f86cd89d00cbb48686/jiter-0.10.0-cp313-cp313-win32.whl", hash = "sha256:48a403277ad1ee208fb930bdf91745e4d2d6e47253eedc96e2559d1e6527006d", size = 211289 },
|
| 348 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/7e/07/1c96b623128bcb913706e294adb5f768fb7baf8db5e1338ce7b4ee8c78ef/jiter-0.10.0-cp313-cp313-win_amd64.whl", hash = "sha256:75f9eb72ecb640619c29bf714e78c9c46c9c4eaafd644bf78577ede459f330d4", size = 205074 },
|
| 349 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/54/46/caa2c1342655f57d8f0f2519774c6d67132205909c65e9aa8255e1d7b4f4/jiter-0.10.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:28ed2a4c05a1f32ef0e1d24c2611330219fed727dae01789f4a335617634b1ca", size = 318225 },
|
| 350 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/43/84/c7d44c75767e18946219ba2d703a5a32ab37b0bc21886a97bc6062e4da42/jiter-0.10.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14a4c418b1ec86a195f1ca69da8b23e8926c752b685af665ce30777233dfe070", size = 350235 },
|
| 351 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/01/16/f5a0135ccd968b480daad0e6ab34b0c7c5ba3bc447e5088152696140dcb3/jiter-0.10.0-cp313-cp313t-win_amd64.whl", hash = "sha256:d7bfed2fe1fe0e4dda6ef682cee888ba444b21e7a6553e03252e4feb6cf0adca", size = 207278 },
|
| 352 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/1c/9b/1d646da42c3de6c2188fdaa15bce8ecb22b635904fc68be025e21249ba44/jiter-0.10.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:5e9251a5e83fab8d87799d3e1a46cb4b7f2919b895c6f4483629ed2446f66522", size = 310866 },
|
| 353 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/ad/0e/26538b158e8a7c7987e94e7aeb2999e2e82b1f9d2e1f6e9874ddf71ebda0/jiter-0.10.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:023aa0204126fe5b87ccbcd75c8a0d0261b9abdbbf46d55e7ae9f8e22424eeb8", size = 318772 },
|
| 354 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/7b/fb/d302893151caa1c2636d6574d213e4b34e31fd077af6050a9c5cbb42f6fb/jiter-0.10.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c189c4f1779c05f75fc17c0c1267594ed918996a231593a21a5ca5438445216", size = 344534 },
|
| 355 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/01/d8/5780b64a149d74e347c5128d82176eb1e3241b1391ac07935693466d6219/jiter-0.10.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:15720084d90d1098ca0229352607cd68256c76991f6b374af96f36920eae13c4", size = 369087 },
|
| 356 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/e8/5b/f235a1437445160e777544f3ade57544daf96ba7e96c1a5b24a6f7ac7004/jiter-0.10.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e4f2fb68e5f1cfee30e2b2a09549a00683e0fde4c6a2ab88c94072fc33cb7426", size = 490694 },
|
| 357 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/85/a9/9c3d4617caa2ff89cf61b41e83820c27ebb3f7b5fae8a72901e8cd6ff9be/jiter-0.10.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ce541693355fc6da424c08b7edf39a2895f58d6ea17d92cc2b168d20907dee12", size = 388992 },
|
| 358 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/68/b1/344fd14049ba5c94526540af7eb661871f9c54d5f5601ff41a959b9a0bbd/jiter-0.10.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31c50c40272e189d50006ad5c73883caabb73d4e9748a688b216e85a9a9ca3b9", size = 351723 },
|
| 359 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/41/89/4c0e345041186f82a31aee7b9d4219a910df672b9fef26f129f0cda07a29/jiter-0.10.0-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fa3402a2ff9815960e0372a47b75c76979d74402448509ccd49a275fa983ef8a", size = 392215 },
|
| 360 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/55/58/ee607863e18d3f895feb802154a2177d7e823a7103f000df182e0f718b38/jiter-0.10.0-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:1956f934dca32d7bb647ea21d06d93ca40868b505c228556d3373cbd255ce853", size = 522762 },
|
| 361 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/15/d0/9123fb41825490d16929e73c212de9a42913d68324a8ce3c8476cae7ac9d/jiter-0.10.0-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:fcedb049bdfc555e261d6f65a6abe1d5ad68825b7202ccb9692636c70fcced86", size = 513427 },
|
| 362 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/d8/b3/2bd02071c5a2430d0b70403a34411fc519c2f227da7b03da9ba6a956f931/jiter-0.10.0-cp314-cp314-win32.whl", hash = "sha256:ac509f7eccca54b2a29daeb516fb95b6f0bd0d0d8084efaf8ed5dfc7b9f0b357", size = 210127 },
|
| 363 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/03/0c/5fe86614ea050c3ecd728ab4035534387cd41e7c1855ef6c031f1ca93e3f/jiter-0.10.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:5ed975b83a2b8639356151cef5c0d597c68376fc4922b45d0eb384ac058cfa00", size = 318527 },
|
| 364 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/b3/4a/4175a563579e884192ba6e81725fc0448b042024419be8d83aa8a80a3f44/jiter-0.10.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3aa96f2abba33dc77f79b4cf791840230375f9534e5fac927ccceb58c5e604a5", size = 354213 },
|
| 365 |
+
]
|
| 366 |
+
|
| 367 |
+
[[package]]
|
| 368 |
+
name = "markdown-it-py"
|
| 369 |
+
version = "4.0.0"
|
| 370 |
+
source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" }
|
| 371 |
+
dependencies = [
|
| 372 |
+
{ name = "mdurl" },
|
| 373 |
+
]
|
| 374 |
+
sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/5b/f5/4ec618ed16cc4f8fb3b701563655a69816155e79e24a17b651541804721d/markdown_it_py-4.0.0.tar.gz", hash = "sha256:cb0a2b4aa34f932c007117b194e945bd74e0ec24133ceb5bac59009cda1cb9f3", size = 73070 }
|
| 375 |
+
wheels = [
|
| 376 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/94/54/e7d793b573f298e1c9013b8c4dade17d481164aa517d1d7148619c2cedbf/markdown_it_py-4.0.0-py3-none-any.whl", hash = "sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147", size = 87321 },
|
| 377 |
+
]
|
| 378 |
+
|
| 379 |
+
[[package]]
|
| 380 |
+
name = "markupsafe"
|
| 381 |
+
version = "3.0.2"
|
| 382 |
+
source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" }
|
| 383 |
+
sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/b2/97/5d42485e71dfc078108a86d6de8fa46db44a1a9295e89c5d6d4a06e23a62/markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0", size = 20537 }
|
| 384 |
+
wheels = [
|
| 385 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/83/0e/67eb10a7ecc77a0c2bbe2b0235765b98d164d81600746914bebada795e97/MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd", size = 14274 },
|
| 386 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/2b/6d/9409f3684d3335375d04e5f05744dfe7e9f120062c9857df4ab490a1031a/MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430", size = 12352 },
|
| 387 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/d2/f5/6eadfcd3885ea85fe2a7c128315cc1bb7241e1987443d78c8fe712d03091/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094", size = 24122 },
|
| 388 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/0c/91/96cf928db8236f1bfab6ce15ad070dfdd02ed88261c2afafd4b43575e9e9/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396", size = 23085 },
|
| 389 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/c2/cf/c9d56af24d56ea04daae7ac0940232d31d5a8354f2b457c6d856b2057d69/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79", size = 22978 },
|
| 390 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/2a/9f/8619835cd6a711d6272d62abb78c033bda638fdc54c4e7f4272cf1c0962b/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a", size = 24208 },
|
| 391 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/f9/bf/176950a1792b2cd2102b8ffeb5133e1ed984547b75db47c25a67d3359f77/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca", size = 23357 },
|
| 392 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/ce/4f/9a02c1d335caabe5c4efb90e1b6e8ee944aa245c1aaaab8e8a618987d816/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c", size = 23344 },
|
| 393 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/ee/55/c271b57db36f748f0e04a759ace9f8f759ccf22b4960c270c78a394f58be/MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1", size = 15101 },
|
| 394 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/29/88/07df22d2dd4df40aba9f3e402e6dc1b8ee86297dddbad4872bd5e7b0094f/MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f", size = 15603 },
|
| 395 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/62/6a/8b89d24db2d32d433dffcd6a8779159da109842434f1dd2f6e71f32f738c/MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c", size = 14510 },
|
| 396 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/7a/06/a10f955f70a2e5a9bf78d11a161029d278eeacbd35ef806c3fd17b13060d/MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb", size = 12486 },
|
| 397 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/34/cf/65d4a571869a1a9078198ca28f39fba5fbb910f952f9dbc5220afff9f5e6/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c", size = 25480 },
|
| 398 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/0c/e3/90e9651924c430b885468b56b3d597cabf6d72be4b24a0acd1fa0e12af67/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d", size = 23914 },
|
| 399 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/66/8c/6c7cf61f95d63bb866db39085150df1f2a5bd3335298f14a66b48e92659c/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe", size = 23796 },
|
| 400 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/bb/35/cbe9238ec3f47ac9a7c8b3df7a808e7cb50fe149dc7039f5f454b3fba218/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5", size = 25473 },
|
| 401 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/e6/32/7621a4382488aa283cc05e8984a9c219abad3bca087be9ec77e89939ded9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a", size = 24114 },
|
| 402 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/0d/80/0985960e4b89922cb5a0bac0ed39c5b96cbc1a536a99f30e8c220a996ed9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9", size = 24098 },
|
| 403 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/82/78/fedb03c7d5380df2427038ec8d973587e90561b2d90cd472ce9254cf348b/MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6", size = 15208 },
|
| 404 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/4f/65/6079a46068dfceaeabb5dcad6d674f5f5c61a6fa5673746f42a9f4c233b3/MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f", size = 15739 },
|
| 405 |
+
]
|
| 406 |
+
|
| 407 |
+
[[package]]
|
| 408 |
+
name = "mdurl"
|
| 409 |
+
version = "0.1.2"
|
| 410 |
+
source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" }
|
| 411 |
+
sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729 }
|
| 412 |
+
wheels = [
|
| 413 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979 },
|
| 414 |
+
]
|
| 415 |
+
|
| 416 |
+
[[package]]
|
| 417 |
+
name = "openai"
|
| 418 |
+
version = "1.106.0"
|
| 419 |
+
source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" }
|
| 420 |
+
dependencies = [
|
| 421 |
+
{ name = "anyio" },
|
| 422 |
+
{ name = "distro" },
|
| 423 |
+
{ name = "httpx" },
|
| 424 |
+
{ name = "jiter" },
|
| 425 |
+
{ name = "pydantic" },
|
| 426 |
+
{ name = "sniffio" },
|
| 427 |
+
{ name = "tqdm" },
|
| 428 |
+
{ name = "typing-extensions" },
|
| 429 |
+
]
|
| 430 |
+
sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/11/3a/ef6c6842ea4df48453f5ff7b624178273d9137acb318afba3872a5f3da49/openai-1.106.0.tar.gz", hash = "sha256:8c5ae2ae61a619cd8ba22aeda8fdff00428280041eff5be5555287634ea6f460", size = 561133 }
|
| 431 |
+
wheels = [
|
| 432 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/01/66/465e4e8095becd1cd8c0a32283d530e7866f434031eafdc93cc1f04869d7/openai-1.106.0-py3-none-any.whl", hash = "sha256:47bf9d07df203cd2b7f90ac2da84aea40340dbdebb2da2f4f70e3a133c605d57", size = 930767 },
|
| 433 |
+
]
|
| 434 |
+
|
| 435 |
+
[[package]]
|
| 436 |
+
name = "protobuf"
|
| 437 |
+
version = "6.32.0"
|
| 438 |
+
source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" }
|
| 439 |
+
sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/c0/df/fb4a8eeea482eca989b51cffd274aac2ee24e825f0bf3cbce5281fa1567b/protobuf-6.32.0.tar.gz", hash = "sha256:a81439049127067fc49ec1d36e25c6ee1d1a2b7be930675f919258d03c04e7d2", size = 440614 }
|
| 440 |
+
wheels = [
|
| 441 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/33/18/df8c87da2e47f4f1dcc5153a81cd6bca4e429803f4069a299e236e4dd510/protobuf-6.32.0-cp310-abi3-win32.whl", hash = "sha256:84f9e3c1ff6fb0308dbacb0950d8aa90694b0d0ee68e75719cb044b7078fe741", size = 424409 },
|
| 442 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/e1/59/0a820b7310f8139bd8d5a9388e6a38e1786d179d6f33998448609296c229/protobuf-6.32.0-cp310-abi3-win_amd64.whl", hash = "sha256:a8bdbb2f009cfc22a36d031f22a625a38b615b5e19e558a7b756b3279723e68e", size = 435735 },
|
| 443 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/cc/5b/0d421533c59c789e9c9894683efac582c06246bf24bb26b753b149bd88e4/protobuf-6.32.0-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:d52691e5bee6c860fff9a1c86ad26a13afbeb4b168cd4445c922b7e2cf85aaf0", size = 426449 },
|
| 444 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/ec/7b/607764ebe6c7a23dcee06e054fd1de3d5841b7648a90fd6def9a3bb58c5e/protobuf-6.32.0-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:501fe6372fd1c8ea2a30b4d9be8f87955a64d6be9c88a973996cef5ef6f0abf1", size = 322869 },
|
| 445 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/40/01/2e730bd1c25392fc32e3268e02446f0d77cb51a2c3a8486b1798e34d5805/protobuf-6.32.0-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:75a2aab2bd1aeb1f5dc7c5f33bcb11d82ea8c055c9becbb41c26a8c43fd7092c", size = 322009 },
|
| 446 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/9c/f2/80ffc4677aac1bc3519b26bc7f7f5de7fce0ee2f7e36e59e27d8beb32dd1/protobuf-6.32.0-py3-none-any.whl", hash = "sha256:ba377e5b67b908c8f3072a57b63e2c6a4cbd18aea4ed98d2584350dbf46f2783", size = 169287 },
|
| 447 |
+
]
|
| 448 |
+
|
| 449 |
+
[[package]]
|
| 450 |
+
name = "pydantic"
|
| 451 |
+
version = "2.11.7"
|
| 452 |
+
source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" }
|
| 453 |
+
dependencies = [
|
| 454 |
+
{ name = "annotated-types" },
|
| 455 |
+
{ name = "pydantic-core" },
|
| 456 |
+
{ name = "typing-extensions" },
|
| 457 |
+
{ name = "typing-inspection" },
|
| 458 |
+
]
|
| 459 |
+
sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/00/dd/4325abf92c39ba8623b5af936ddb36ffcfe0beae70405d456ab1fb2f5b8c/pydantic-2.11.7.tar.gz", hash = "sha256:d989c3c6cb79469287b1569f7447a17848c998458d49ebe294e975b9baf0f0db", size = 788350 }
|
| 460 |
+
wheels = [
|
| 461 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/6a/c0/ec2b1c8712ca690e5d61979dee872603e92b8a32f94cc1b72d53beab008a/pydantic-2.11.7-py3-none-any.whl", hash = "sha256:dde5df002701f6de26248661f6835bbe296a47bf73990135c7d07ce741b9623b", size = 444782 },
|
| 462 |
+
]
|
| 463 |
+
|
| 464 |
+
[package.optional-dependencies]
|
| 465 |
+
email = [
|
| 466 |
+
{ name = "email-validator" },
|
| 467 |
+
]
|
| 468 |
+
|
| 469 |
+
[[package]]
|
| 470 |
+
name = "pydantic-core"
|
| 471 |
+
version = "2.33.2"
|
| 472 |
+
source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" }
|
| 473 |
+
dependencies = [
|
| 474 |
+
{ name = "typing-extensions" },
|
| 475 |
+
]
|
| 476 |
+
sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/ad/88/5f2260bdfae97aabf98f1778d43f69574390ad787afb646292a638c923d4/pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc", size = 435195 }
|
| 477 |
+
wheels = [
|
| 478 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/46/8c/99040727b41f56616573a28771b1bfa08a3d3fe74d3d513f01251f79f172/pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f", size = 2015688 },
|
| 479 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/3a/cc/5999d1eb705a6cefc31f0b4a90e9f7fc400539b1a1030529700cc1b51838/pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6", size = 1844808 },
|
| 480 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/6f/5e/a0a7b8885c98889a18b6e376f344da1ef323d270b44edf8174d6bce4d622/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef", size = 1885580 },
|
| 481 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/3b/2a/953581f343c7d11a304581156618c3f592435523dd9d79865903272c256a/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a", size = 1973859 },
|
| 482 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/e6/55/f1a813904771c03a3f97f676c62cca0c0a4138654107c1b61f19c644868b/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916", size = 2120810 },
|
| 483 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/aa/c3/053389835a996e18853ba107a63caae0b9deb4a276c6b472931ea9ae6e48/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a", size = 2676498 },
|
| 484 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/eb/3c/f4abd740877a35abade05e437245b192f9d0ffb48bbbbd708df33d3cda37/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d", size = 2000611 },
|
| 485 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/59/a7/63ef2fed1837d1121a894d0ce88439fe3e3b3e48c7543b2a4479eb99c2bd/pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56", size = 2107924 },
|
| 486 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/04/8f/2551964ef045669801675f1cfc3b0d74147f4901c3ffa42be2ddb1f0efc4/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5", size = 2063196 },
|
| 487 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/26/bd/d9602777e77fc6dbb0c7db9ad356e9a985825547dce5ad1d30ee04903918/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e", size = 2236389 },
|
| 488 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/42/db/0e950daa7e2230423ab342ae918a794964b053bec24ba8af013fc7c94846/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162", size = 2239223 },
|
| 489 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/58/4d/4f937099c545a8a17eb52cb67fe0447fd9a373b348ccfa9a87f141eeb00f/pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = "sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849", size = 1900473 },
|
| 490 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/a0/75/4a0a9bac998d78d889def5e4ef2b065acba8cae8c93696906c3a91f310ca/pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9", size = 1955269 },
|
| 491 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/f9/86/1beda0576969592f1497b4ce8e7bc8cbdf614c352426271b1b10d5f0aa64/pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9", size = 1893921 },
|
| 492 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/a4/7d/e09391c2eebeab681df2b74bfe6c43422fffede8dc74187b2b0bf6fd7571/pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac", size = 1806162 },
|
| 493 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/f1/3d/847b6b1fed9f8ed3bb95a9ad04fbd0b212e832d4f0f50ff4d9ee5a9f15cf/pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5", size = 1981560 },
|
| 494 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/6f/9a/e73262f6c6656262b5fdd723ad90f518f579b7bc8622e43a942eec53c938/pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9", size = 1935777 },
|
| 495 |
+
]
|
| 496 |
+
|
| 497 |
+
[[package]]
|
| 498 |
+
name = "pygments"
|
| 499 |
+
version = "2.19.2"
|
| 500 |
+
source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" }
|
| 501 |
+
sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631 }
|
| 502 |
+
wheels = [
|
| 503 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217 },
|
| 504 |
+
]
|
| 505 |
+
|
| 506 |
+
[[package]]
|
| 507 |
+
name = "python-dotenv"
|
| 508 |
+
version = "1.1.1"
|
| 509 |
+
source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" }
|
| 510 |
+
sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/f6/b0/4bc07ccd3572a2f9df7e6782f52b0c6c90dcbb803ac4a167702d7d0dfe1e/python_dotenv-1.1.1.tar.gz", hash = "sha256:a8a6399716257f45be6a007360200409fce5cda2661e3dec71d23dc15f6189ab", size = 41978 }
|
| 511 |
+
wheels = [
|
| 512 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/5f/ed/539768cf28c661b5b068d66d96a2f155c4971a5d55684a514c1a0e0dec2f/python_dotenv-1.1.1-py3-none-any.whl", hash = "sha256:31f23644fe2602f88ff55e1f5c79ba497e01224ee7737937930c448e4d0e24dc", size = 20556 },
|
| 513 |
+
]
|
| 514 |
+
|
| 515 |
+
[[package]]
|
| 516 |
+
name = "python-multipart"
|
| 517 |
+
version = "0.0.20"
|
| 518 |
+
source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" }
|
| 519 |
+
sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/f3/87/f44d7c9f274c7ee665a29b885ec97089ec5dc034c7f3fafa03da9e39a09e/python_multipart-0.0.20.tar.gz", hash = "sha256:8dd0cab45b8e23064ae09147625994d090fa46f5b0d1e13af944c331a7fa9d13", size = 37158 }
|
| 520 |
+
wheels = [
|
| 521 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/45/58/38b5afbc1a800eeea951b9285d3912613f2603bdf897a4ab0f4bd7f405fc/python_multipart-0.0.20-py3-none-any.whl", hash = "sha256:8a62d3a8335e06589fe01f2a3e178cdcc632f3fbe0d492ad9ee0ec35aab1f104", size = 24546 },
|
| 522 |
+
]
|
| 523 |
+
|
| 524 |
+
[[package]]
|
| 525 |
+
name = "pyyaml"
|
| 526 |
+
version = "6.0.2"
|
| 527 |
+
source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" }
|
| 528 |
+
sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", size = 130631 }
|
| 529 |
+
wheels = [
|
| 530 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/ef/e3/3af305b830494fa85d95f6d95ef7fa73f2ee1cc8ef5b495c7c3269fb835f/PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba", size = 181309 },
|
| 531 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/45/9f/3b1c20a0b7a3200524eb0076cc027a970d320bd3a6592873c85c92a08731/PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1", size = 171679 },
|
| 532 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/7c/9a/337322f27005c33bcb656c655fa78325b730324c78620e8328ae28b64d0c/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133", size = 733428 },
|
| 533 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/a3/69/864fbe19e6c18ea3cc196cbe5d392175b4cf3d5d0ac1403ec3f2d237ebb5/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484", size = 763361 },
|
| 534 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/04/24/b7721e4845c2f162d26f50521b825fb061bc0a5afcf9a386840f23ea19fa/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5", size = 759523 },
|
| 535 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/2b/b2/e3234f59ba06559c6ff63c4e10baea10e5e7df868092bf9ab40e5b9c56b6/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc", size = 726660 },
|
| 536 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/fe/0f/25911a9f080464c59fab9027482f822b86bf0608957a5fcc6eaac85aa515/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652", size = 751597 },
|
| 537 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/14/0d/e2c3b43bbce3cf6bd97c840b46088a3031085179e596d4929729d8d68270/PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183", size = 140527 },
|
| 538 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/fa/de/02b54f42487e3d3c6efb3f89428677074ca7bf43aae402517bc7cca949f3/PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563", size = 156446 },
|
| 539 |
+
]
|
| 540 |
+
|
| 541 |
+
[[package]]
|
| 542 |
+
name = "requests"
|
| 543 |
+
version = "2.32.5"
|
| 544 |
+
source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" }
|
| 545 |
+
dependencies = [
|
| 546 |
+
{ name = "certifi" },
|
| 547 |
+
{ name = "charset-normalizer" },
|
| 548 |
+
{ name = "idna" },
|
| 549 |
+
{ name = "urllib3" },
|
| 550 |
+
]
|
| 551 |
+
sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517 }
|
| 552 |
+
wheels = [
|
| 553 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738 },
|
| 554 |
+
]
|
| 555 |
+
|
| 556 |
+
[[package]]
|
| 557 |
+
name = "rich"
|
| 558 |
+
version = "14.1.0"
|
| 559 |
+
source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" }
|
| 560 |
+
dependencies = [
|
| 561 |
+
{ name = "markdown-it-py" },
|
| 562 |
+
{ name = "pygments" },
|
| 563 |
+
]
|
| 564 |
+
sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/fe/75/af448d8e52bf1d8fa6a9d089ca6c07ff4453d86c65c145d0a300bb073b9b/rich-14.1.0.tar.gz", hash = "sha256:e497a48b844b0320d45007cdebfeaeed8db2a4f4bcf49f15e455cfc4af11eaa8", size = 224441 }
|
| 565 |
+
wheels = [
|
| 566 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/e3/30/3c4d035596d3cf444529e0b2953ad0466f6049528a879d27534700580395/rich-14.1.0-py3-none-any.whl", hash = "sha256:536f5f1785986d6dbdea3c75205c473f970777b4a0d6c6dd1b696aa05a3fa04f", size = 243368 },
|
| 567 |
+
]
|
| 568 |
+
|
| 569 |
+
[[package]]
|
| 570 |
+
name = "rich-toolkit"
|
| 571 |
+
version = "0.15.0"
|
| 572 |
+
source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" }
|
| 573 |
+
dependencies = [
|
| 574 |
+
{ name = "click" },
|
| 575 |
+
{ name = "rich" },
|
| 576 |
+
{ name = "typing-extensions" },
|
| 577 |
+
]
|
| 578 |
+
sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/65/36/cdb3d51371ad0cccbf1541506304783bd72d55790709b8eb68c0d401a13a/rich_toolkit-0.15.0.tar.gz", hash = "sha256:3f5730e9f2d36d0bfe01cf723948b7ecf4cc355d2b71e2c00e094f7963128c09", size = 115118 }
|
| 579 |
+
wheels = [
|
| 580 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/75/e4/b0794eefb3cf78566b15e5bf576492c1d4a92ce5f6da55675bc11e9ef5d8/rich_toolkit-0.15.0-py3-none-any.whl", hash = "sha256:ddb91008283d4a7989fd8ff0324a48773a7a2276229c6a3070755645538ef1bb", size = 29062 },
|
| 581 |
+
]
|
| 582 |
+
|
| 583 |
+
[[package]]
|
| 584 |
+
name = "rignore"
|
| 585 |
+
version = "0.6.4"
|
| 586 |
+
source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" }
|
| 587 |
+
sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/73/46/05a94dc55ac03cf931d18e43b86ecee5ee054cb88b7853fffd741e35009c/rignore-0.6.4.tar.gz", hash = "sha256:e893fdd2d7fdcfa9407d0b7600ef2c2e2df97f55e1c45d4a8f54364829ddb0ab", size = 11633 }
|
| 588 |
+
wheels = [
|
| 589 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/db/a3/edd7d0d5cc0720de132b6651cef95ee080ce5fca11c77d8a47db848e5f90/rignore-0.6.4-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:2b3b1e266ce45189240d14dfa1057f8013ea34b9bc8b3b44125ec8d25fdb3985", size = 885304 },
|
| 590 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/93/a1/d8d2fb97a6548307507d049b7e93885d4a0dfa1c907af5983fd9f9362a21/rignore-0.6.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:45fe803628cc14714df10e8d6cdc23950a47eb9eb37dfea9a4779f4c672d2aa0", size = 818799 },
|
| 591 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/b1/cd/949981fcc180ad5ba7b31c52e78b74b2dea6b7bf744ad4c0c4b212f6da78/rignore-0.6.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e439f034277a947a4126e2da79dbb43e33d73d7c09d3d72a927e02f8a16f59aa", size = 892024 },
|
| 592 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/b0/d3/9042d701a8062d9c88f87760bbc2695ee2c23b3f002d34486b72a85f8efe/rignore-0.6.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:84b5121650ae24621154c7bdba8b8970b0739d8146505c9f38e0cda9385d1004", size = 871430 },
|
| 593 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/eb/50/3370249b984212b7355f3d9241aa6d02e706067c6d194a2614dfbc0f5b27/rignore-0.6.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:52b0957b585ab48a445cf8ac1dbc33a272ab060835e583b4f95aa8c67c23fb2b", size = 1160559 },
|
| 594 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/6c/6f/2ad7f925838091d065524f30a8abda846d1813eee93328febf262b5cda21/rignore-0.6.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:50359e0d5287b5e2743bd2f2fbf05df619c8282fd3af12f6628ff97b9675551d", size = 939947 },
|
| 595 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/1f/01/626ec94d62475ae7ef8b00ef98cea61cbea52a389a666703c97c4673d406/rignore-0.6.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efe18096dcb1596757dfe0b412aab6d32564473ae7ee58dea0a8b4be5b1a2e3b", size = 949471 },
|
| 596 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/e8/c3/699c4f03b3c46f4b5c02f17a0a339225da65aad547daa5b03001e7c6a382/rignore-0.6.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b79c212d9990a273ad91e8d9765e1766ef6ecedd3be65375d786a252762ba385", size = 974912 },
|
| 597 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/cd/35/04626c12f9f92a9fc789afc2be32838a5d9b23b6fa8b2ad4a8625638d15b/rignore-0.6.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c6ffa7f2a8894c65aa5dc4e8ac8bbdf39a326c0c6589efd27686cfbb48f0197d", size = 1067281 },
|
| 598 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/fe/9c/8f17baf3b984afea151cb9094716f6f1fb8e8737db97fc6eb6d494bd0780/rignore-0.6.4-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:a63f5720dffc8d8fb0a4d02fafb8370a4031ebf3f99a4e79f334a91e905b7349", size = 1134414 },
|
| 599 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/10/88/ef84ffa916a96437c12cefcc39d474122da9626d75e3a2ebe09ec5d32f1b/rignore-0.6.4-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ce33982da47ac5dc09d19b04fa8d7c9aa6292fc0bd1ecf33076989faa8886094", size = 1109330 },
|
| 600 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/27/43/2ada5a2ec03b82e903610a1c483f516f78e47700ee6db9823f739e08b3af/rignore-0.6.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:d899621867aa266824fbd9150e298f19d25b93903ef0133c09f70c65a3416eca", size = 1120381 },
|
| 601 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/3b/99/e7bcc643085131cb14dbea772def72bf1f6fe9037171ebe177c4f228abc8/rignore-0.6.4-cp313-cp313-win32.whl", hash = "sha256:d0615a6bf4890ec5a90b5fb83666822088fbd4e8fcd740c386fcce51e2f6feea", size = 641761 },
|
| 602 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/d9/25/7798908044f27dea1a8abdc75c14523e33770137651e5f775a15143f4218/rignore-0.6.4-cp313-cp313-win_amd64.whl", hash = "sha256:145177f0e32716dc2f220b07b3cde2385b994b7ea28d5c96fbec32639e9eac6f", size = 719876 },
|
| 603 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/b4/e3/ae1e30b045bf004ad77bbd1679b9afff2be8edb166520921c6f29420516a/rignore-0.6.4-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e55bf8f9bbd186f58ab646b4a08718c77131d28a9004e477612b0cbbd5202db2", size = 891776 },
|
| 604 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/45/a9/1193e3bc23ca0e6eb4f17cf4b99971237f97cfa6f241d98366dff90a6d09/rignore-0.6.4-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2521f7bf3ee1f2ab22a100a3a4eed39a97b025804e5afe4323528e9ce8f084a5", size = 871442 },
|
| 605 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/20/83/4c52ae429a0b2e1ce667e35b480e9a6846f9468c443baeaed5d775af9485/rignore-0.6.4-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0cc35773a8a9c119359ef974d0856988d4601d4daa6f532c05f66b4587cf35bc", size = 1159844 },
|
| 606 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/c1/2f/c740f5751f464c937bfe252dc15a024ae081352cfe80d94aa16d6a617482/rignore-0.6.4-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b665b1ea14457d7b49e834baabc635a3b8c10cfb5cca5c21161fabdbfc2b850e", size = 939456 },
|
| 607 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/fc/dd/68dbb08ac0edabf44dd144ff546a3fb0253c5af708e066847df39fc9188f/rignore-0.6.4-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:c7fd339f344a8548724f289495b835bed7b81174a0bc1c28c6497854bd8855db", size = 1067070 },
|
| 608 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/3b/3a/7e7ea6f0d31d3f5beb0f2cf2c4c362672f5f7f125714458673fc579e2bed/rignore-0.6.4-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:91dc94b1cc5af8d6d25ce6edd29e7351830f19b0a03b75cb3adf1f76d00f3007", size = 1134598 },
|
| 609 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/7e/06/1b3307f6437d29bede5a95738aa89e6d910ba68d4054175c9f60d8e2c6b1/rignore-0.6.4-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:4d1918221a249e5342b60fd5fa513bf3d6bf272a8738e66023799f0c82ecd788", size = 1108862 },
|
| 610 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/b0/d5/b37c82519f335f2c472a63fc6215c6f4c51063ecf3166e3acf508011afbd/rignore-0.6.4-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:240777332b859dc89dcba59ab6e3f1e062bc8e862ffa3e5f456e93f7fd5cb415", size = 1120002 },
|
| 611 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/ac/72/2f05559ed5e69bdfdb56ea3982b48e6c0017c59f7241f7e1c5cae992b347/rignore-0.6.4-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66b0e548753e55cc648f1e7b02d9f74285fe48bb49cec93643d31e563773ab3f", size = 949454 },
|
| 612 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/0b/92/186693c8f838d670510ac1dfb35afbe964320fbffb343ba18f3d24441941/rignore-0.6.4-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6971ac9fdd5a0bd299a181096f091c4f3fd286643adceba98eccc03c688a6637", size = 974663 },
|
| 613 |
+
]
|
| 614 |
+
|
| 615 |
+
[[package]]
|
| 616 |
+
name = "sentry-sdk"
|
| 617 |
+
version = "2.35.0"
|
| 618 |
+
source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" }
|
| 619 |
+
dependencies = [
|
| 620 |
+
{ name = "certifi" },
|
| 621 |
+
{ name = "urllib3" },
|
| 622 |
+
]
|
| 623 |
+
sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/31/83/055dc157b719651ef13db569bb8cf2103df11174478649735c1b2bf3f6bc/sentry_sdk-2.35.0.tar.gz", hash = "sha256:5ea58d352779ce45d17bc2fa71ec7185205295b83a9dbb5707273deb64720092", size = 343014 }
|
| 624 |
+
wheels = [
|
| 625 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/36/3d/742617a7c644deb0c1628dcf6bb2d2165ab7c6aab56fe5222758994007f8/sentry_sdk-2.35.0-py2.py3-none-any.whl", hash = "sha256:6e0c29b9a5d34de8575ffb04d289a987ff3053cf2c98ede445bea995e3830263", size = 363806 },
|
| 626 |
+
]
|
| 627 |
+
|
| 628 |
+
[[package]]
|
| 629 |
+
name = "setuptools"
|
| 630 |
+
version = "80.9.0"
|
| 631 |
+
source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" }
|
| 632 |
+
sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/18/5d/3bf57dcd21979b887f014ea83c24ae194cfcd12b9e0fda66b957c69d1fca/setuptools-80.9.0.tar.gz", hash = "sha256:f36b47402ecde768dbfafc46e8e4207b4360c654f1f3bb84475f0a28628fb19c", size = 1319958 }
|
| 633 |
+
wheels = [
|
| 634 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/a3/dc/17031897dae0efacfea57dfd3a82fdd2a2aeb58e0ff71b77b87e44edc772/setuptools-80.9.0-py3-none-any.whl", hash = "sha256:062d34222ad13e0cc312a4c02d73f059e86a4acbfbdea8f8f76b28c99f306922", size = 1201486 },
|
| 635 |
+
]
|
| 636 |
+
|
| 637 |
+
[[package]]
|
| 638 |
+
name = "shellingham"
|
| 639 |
+
version = "1.5.4"
|
| 640 |
+
source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" }
|
| 641 |
+
sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/58/15/8b3609fd3830ef7b27b655beb4b4e9c62313a4e8da8c676e142cc210d58e/shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de", size = 10310 }
|
| 642 |
+
wheels = [
|
| 643 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/e0/f9/0595336914c5619e5f28a1fb793285925a8cd4b432c9da0a987836c7f822/shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686", size = 9755 },
|
| 644 |
+
]
|
| 645 |
+
|
| 646 |
+
[[package]]
|
| 647 |
+
name = "sniffio"
|
| 648 |
+
version = "1.3.1"
|
| 649 |
+
source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" }
|
| 650 |
+
sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372 }
|
| 651 |
+
wheels = [
|
| 652 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235 },
|
| 653 |
+
]
|
| 654 |
+
|
| 655 |
+
[[package]]
|
| 656 |
+
name = "starlette"
|
| 657 |
+
version = "0.47.2"
|
| 658 |
+
source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" }
|
| 659 |
+
dependencies = [
|
| 660 |
+
{ name = "anyio" },
|
| 661 |
+
]
|
| 662 |
+
sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/04/57/d062573f391d062710d4088fa1369428c38d51460ab6fedff920efef932e/starlette-0.47.2.tar.gz", hash = "sha256:6ae9aa5db235e4846decc1e7b79c4f346adf41e9777aebeb49dfd09bbd7023d8", size = 2583948 }
|
| 663 |
+
wheels = [
|
| 664 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/f7/1f/b876b1f83aef204198a42dc101613fefccb32258e5428b5f9259677864b4/starlette-0.47.2-py3-none-any.whl", hash = "sha256:c5847e96134e5c5371ee9fac6fdf1a67336d5815e09eb2a01fdb57a351ef915b", size = 72984 },
|
| 665 |
+
]
|
| 666 |
+
|
| 667 |
+
[[package]]
|
| 668 |
+
name = "tqdm"
|
| 669 |
+
version = "4.67.1"
|
| 670 |
+
source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" }
|
| 671 |
+
dependencies = [
|
| 672 |
+
{ name = "colorama", marker = "sys_platform == 'win32'" },
|
| 673 |
+
]
|
| 674 |
+
sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/a8/4b/29b4ef32e036bb34e4ab51796dd745cdba7ed47ad142a9f4a1eb8e0c744d/tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2", size = 169737 }
|
| 675 |
+
wheels = [
|
| 676 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/d0/30/dc54f88dd4a2b5dc8a0279bdd7270e735851848b762aeb1c1184ed1f6b14/tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2", size = 78540 },
|
| 677 |
+
]
|
| 678 |
+
|
| 679 |
+
[[package]]
|
| 680 |
+
name = "typer"
|
| 681 |
+
version = "0.16.0"
|
| 682 |
+
source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" }
|
| 683 |
+
dependencies = [
|
| 684 |
+
{ name = "click" },
|
| 685 |
+
{ name = "rich" },
|
| 686 |
+
{ name = "shellingham" },
|
| 687 |
+
{ name = "typing-extensions" },
|
| 688 |
+
]
|
| 689 |
+
sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/c5/8c/7d682431efca5fd290017663ea4588bf6f2c6aad085c7f108c5dbc316e70/typer-0.16.0.tar.gz", hash = "sha256:af377ffaee1dbe37ae9440cb4e8f11686ea5ce4e9bae01b84ae7c63b87f1dd3b", size = 102625 }
|
| 690 |
+
wheels = [
|
| 691 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/76/42/3efaf858001d2c2913de7f354563e3a3a2f0decae3efe98427125a8f441e/typer-0.16.0-py3-none-any.whl", hash = "sha256:1f79bed11d4d02d4310e3c1b7ba594183bcedb0ac73b27a9e5f28f6fb5b98855", size = 46317 },
|
| 692 |
+
]
|
| 693 |
+
|
| 694 |
+
[[package]]
|
| 695 |
+
name = "typing-extensions"
|
| 696 |
+
version = "4.14.1"
|
| 697 |
+
source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" }
|
| 698 |
+
sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/98/5a/da40306b885cc8c09109dc2e1abd358d5684b1425678151cdaed4731c822/typing_extensions-4.14.1.tar.gz", hash = "sha256:38b39f4aeeab64884ce9f74c94263ef78f3c22467c8724005483154c26648d36", size = 107673 }
|
| 699 |
+
wheels = [
|
| 700 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/b5/00/d631e67a838026495268c2f6884f3711a15a9a2a96cd244fdaea53b823fb/typing_extensions-4.14.1-py3-none-any.whl", hash = "sha256:d1e1e3b58374dc93031d6eda2420a48ea44a36c2b4766a4fdeb3710755731d76", size = 43906 },
|
| 701 |
+
]
|
| 702 |
+
|
| 703 |
+
[[package]]
|
| 704 |
+
name = "typing-inspection"
|
| 705 |
+
version = "0.4.1"
|
| 706 |
+
source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" }
|
| 707 |
+
dependencies = [
|
| 708 |
+
{ name = "typing-extensions" },
|
| 709 |
+
]
|
| 710 |
+
sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/f8/b1/0c11f5058406b3af7609f121aaa6b609744687f1d158b3c3a5bf4cc94238/typing_inspection-0.4.1.tar.gz", hash = "sha256:6ae134cc0203c33377d43188d4064e9b357dba58cff3185f22924610e70a9d28", size = 75726 }
|
| 711 |
+
wheels = [
|
| 712 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/17/69/cd203477f944c353c31bade965f880aa1061fd6bf05ded0726ca845b6ff7/typing_inspection-0.4.1-py3-none-any.whl", hash = "sha256:389055682238f53b04f7badcb49b989835495a96700ced5dab2d8feae4b26f51", size = 14552 },
|
| 713 |
+
]
|
| 714 |
+
|
| 715 |
+
[[package]]
|
| 716 |
+
name = "urllib3"
|
| 717 |
+
version = "2.5.0"
|
| 718 |
+
source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" }
|
| 719 |
+
sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/15/22/9ee70a2574a4f4599c47dd506532914ce044817c7752a79b6a51286319bc/urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760", size = 393185 }
|
| 720 |
+
wheels = [
|
| 721 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/a7/c2/fe1e52489ae3122415c51f387e221dd0773709bad6c6cdaa599e8a2c5185/urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc", size = 129795 },
|
| 722 |
+
]
|
| 723 |
+
|
| 724 |
+
[[package]]
|
| 725 |
+
name = "uvicorn"
|
| 726 |
+
version = "0.35.0"
|
| 727 |
+
source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" }
|
| 728 |
+
dependencies = [
|
| 729 |
+
{ name = "click" },
|
| 730 |
+
{ name = "h11" },
|
| 731 |
+
]
|
| 732 |
+
sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/5e/42/e0e305207bb88c6b8d3061399c6a961ffe5fbb7e2aa63c9234df7259e9cd/uvicorn-0.35.0.tar.gz", hash = "sha256:bc662f087f7cf2ce11a1d7fd70b90c9f98ef2e2831556dd078d131b96cc94a01", size = 78473 }
|
| 733 |
+
wheels = [
|
| 734 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/d2/e2/dc81b1bd1dcfe91735810265e9d26bc8ec5da45b4c0f6237e286819194c3/uvicorn-0.35.0-py3-none-any.whl", hash = "sha256:197535216b25ff9b785e29a0b79199f55222193d47f820816e7da751e9bc8d4a", size = 66406 },
|
| 735 |
+
]
|
| 736 |
+
|
| 737 |
+
[package.optional-dependencies]
|
| 738 |
+
standard = [
|
| 739 |
+
{ name = "colorama", marker = "sys_platform == 'win32'" },
|
| 740 |
+
{ name = "httptools" },
|
| 741 |
+
{ name = "python-dotenv" },
|
| 742 |
+
{ name = "pyyaml" },
|
| 743 |
+
{ name = "uvloop", marker = "platform_python_implementation != 'PyPy' and sys_platform != 'cygwin' and sys_platform != 'win32'" },
|
| 744 |
+
{ name = "watchfiles" },
|
| 745 |
+
{ name = "websockets" },
|
| 746 |
+
]
|
| 747 |
+
|
| 748 |
+
[[package]]
|
| 749 |
+
name = "uvloop"
|
| 750 |
+
version = "0.21.0"
|
| 751 |
+
source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" }
|
| 752 |
+
sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/af/c0/854216d09d33c543f12a44b393c402e89a920b1a0a7dc634c42de91b9cf6/uvloop-0.21.0.tar.gz", hash = "sha256:3bf12b0fda68447806a7ad847bfa591613177275d35b6724b1ee573faa3704e3", size = 2492741 }
|
| 753 |
+
wheels = [
|
| 754 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/3f/8d/2cbef610ca21539f0f36e2b34da49302029e7c9f09acef0b1c3b5839412b/uvloop-0.21.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:bfd55dfcc2a512316e65f16e503e9e450cab148ef11df4e4e679b5e8253a5281", size = 1468123 },
|
| 755 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/93/0d/b0038d5a469f94ed8f2b2fce2434a18396d8fbfb5da85a0a9781ebbdec14/uvloop-0.21.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:787ae31ad8a2856fc4e7c095341cccc7209bd657d0e71ad0dc2ea83c4a6fa8af", size = 819325 },
|
| 756 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/50/94/0a687f39e78c4c1e02e3272c6b2ccdb4e0085fda3b8352fecd0410ccf915/uvloop-0.21.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ee4d4ef48036ff6e5cfffb09dd192c7a5027153948d85b8da7ff705065bacc6", size = 4582806 },
|
| 757 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/d2/19/f5b78616566ea68edd42aacaf645adbf71fbd83fc52281fba555dc27e3f1/uvloop-0.21.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3df876acd7ec037a3d005b3ab85a7e4110422e4d9c1571d4fc89b0fc41b6816", size = 4701068 },
|
| 758 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/47/57/66f061ee118f413cd22a656de622925097170b9380b30091b78ea0c6ea75/uvloop-0.21.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bd53ecc9a0f3d87ab847503c2e1552b690362e005ab54e8a48ba97da3924c0dc", size = 4454428 },
|
| 759 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/63/9a/0962b05b308494e3202d3f794a6e85abe471fe3cafdbcf95c2e8c713aabd/uvloop-0.21.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a5c39f217ab3c663dc699c04cbd50c13813e31d917642d459fdcec07555cc553", size = 4660018 },
|
| 760 |
+
]
|
| 761 |
+
|
| 762 |
+
[[package]]
|
| 763 |
+
name = "warptestui"
|
| 764 |
+
version = "0.1.0"
|
| 765 |
+
source = { virtual = "." }
|
| 766 |
+
dependencies = [
|
| 767 |
+
{ name = "fastapi", extra = ["standard"] },
|
| 768 |
+
{ name = "grpcio-tools" },
|
| 769 |
+
{ name = "httpx", extra = ["http2"] },
|
| 770 |
+
{ name = "openai" },
|
| 771 |
+
{ name = "protobuf" },
|
| 772 |
+
{ name = "python-dotenv" },
|
| 773 |
+
{ name = "requests" },
|
| 774 |
+
{ name = "uvicorn", extra = ["standard"] },
|
| 775 |
+
{ name = "websockets" },
|
| 776 |
+
]
|
| 777 |
+
|
| 778 |
+
[package.metadata]
|
| 779 |
+
requires-dist = [
|
| 780 |
+
{ name = "fastapi", extras = ["standard"] },
|
| 781 |
+
{ name = "grpcio-tools" },
|
| 782 |
+
{ name = "httpx", extras = ["http2"] },
|
| 783 |
+
{ name = "openai", specifier = ">=1.106.0" },
|
| 784 |
+
{ name = "protobuf" },
|
| 785 |
+
{ name = "python-dotenv" },
|
| 786 |
+
{ name = "requests", specifier = ">=2.32.5" },
|
| 787 |
+
{ name = "uvicorn", extras = ["standard"] },
|
| 788 |
+
{ name = "websockets", specifier = ">=15.0.1" },
|
| 789 |
+
]
|
| 790 |
+
|
| 791 |
+
[[package]]
|
| 792 |
+
name = "watchfiles"
|
| 793 |
+
version = "1.1.0"
|
| 794 |
+
source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" }
|
| 795 |
+
dependencies = [
|
| 796 |
+
{ name = "anyio" },
|
| 797 |
+
]
|
| 798 |
+
sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/2a/9a/d451fcc97d029f5812e898fd30a53fd8c15c7bbd058fd75cfc6beb9bd761/watchfiles-1.1.0.tar.gz", hash = "sha256:693ed7ec72cbfcee399e92c895362b6e66d63dac6b91e2c11ae03d10d503e575", size = 94406 }
|
| 799 |
+
wheels = [
|
| 800 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/d3/42/fae874df96595556a9089ade83be34a2e04f0f11eb53a8dbf8a8a5e562b4/watchfiles-1.1.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:5007f860c7f1f8df471e4e04aaa8c43673429047d63205d1630880f7637bca30", size = 402004 },
|
| 801 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/fa/55/a77e533e59c3003d9803c09c44c3651224067cbe7fb5d574ddbaa31e11ca/watchfiles-1.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:20ecc8abbd957046f1fe9562757903f5eaf57c3bce70929fda6c7711bb58074a", size = 393671 },
|
| 802 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/05/68/b0afb3f79c8e832e6571022611adbdc36e35a44e14f129ba09709aa4bb7a/watchfiles-1.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f2f0498b7d2a3c072766dba3274fe22a183dbea1f99d188f1c6c72209a1063dc", size = 449772 },
|
| 803 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/ff/05/46dd1f6879bc40e1e74c6c39a1b9ab9e790bf1f5a2fe6c08b463d9a807f4/watchfiles-1.1.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:239736577e848678e13b201bba14e89718f5c2133dfd6b1f7846fa1b58a8532b", size = 456789 },
|
| 804 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/8b/ca/0eeb2c06227ca7f12e50a47a3679df0cd1ba487ea19cf844a905920f8e95/watchfiles-1.1.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eff4b8d89f444f7e49136dc695599a591ff769300734446c0a86cba2eb2f9895", size = 482551 },
|
| 805 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/31/47/2cecbd8694095647406645f822781008cc524320466ea393f55fe70eed3b/watchfiles-1.1.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:12b0a02a91762c08f7264e2e79542f76870c3040bbc847fb67410ab81474932a", size = 597420 },
|
| 806 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/d9/7e/82abc4240e0806846548559d70f0b1a6dfdca75c1b4f9fa62b504ae9b083/watchfiles-1.1.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:29e7bc2eee15cbb339c68445959108803dc14ee0c7b4eea556400131a8de462b", size = 477950 },
|
| 807 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/25/0d/4d564798a49bf5482a4fa9416dea6b6c0733a3b5700cb8a5a503c4b15853/watchfiles-1.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d9481174d3ed982e269c090f780122fb59cee6c3796f74efe74e70f7780ed94c", size = 451706 },
|
| 808 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/81/b5/5516cf46b033192d544102ea07c65b6f770f10ed1d0a6d388f5d3874f6e4/watchfiles-1.1.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:80f811146831c8c86ab17b640801c25dc0a88c630e855e2bef3568f30434d52b", size = 625814 },
|
| 809 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/0c/dd/7c1331f902f30669ac3e754680b6edb9a0dd06dea5438e61128111fadd2c/watchfiles-1.1.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:60022527e71d1d1fda67a33150ee42869042bce3d0fcc9cc49be009a9cded3fb", size = 622820 },
|
| 810 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/1b/14/36d7a8e27cd128d7b1009e7715a7c02f6c131be9d4ce1e5c3b73d0e342d8/watchfiles-1.1.0-cp313-cp313-win32.whl", hash = "sha256:32d6d4e583593cb8576e129879ea0991660b935177c0f93c6681359b3654bfa9", size = 279194 },
|
| 811 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/25/41/2dd88054b849aa546dbeef5696019c58f8e0774f4d1c42123273304cdb2e/watchfiles-1.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:f21af781a4a6fbad54f03c598ab620e3a77032c5878f3d780448421a6e1818c7", size = 292349 },
|
| 812 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/c8/cf/421d659de88285eb13941cf11a81f875c176f76a6d99342599be88e08d03/watchfiles-1.1.0-cp313-cp313-win_arm64.whl", hash = "sha256:5366164391873ed76bfdf618818c82084c9db7fac82b64a20c44d335eec9ced5", size = 283836 },
|
| 813 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/45/10/6faf6858d527e3599cc50ec9fcae73590fbddc1420bd4fdccfebffeedbc6/watchfiles-1.1.0-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:17ab167cca6339c2b830b744eaf10803d2a5b6683be4d79d8475d88b4a8a4be1", size = 400343 },
|
| 814 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/03/20/5cb7d3966f5e8c718006d0e97dfe379a82f16fecd3caa7810f634412047a/watchfiles-1.1.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:328dbc9bff7205c215a7807da7c18dce37da7da718e798356212d22696404339", size = 392916 },
|
| 815 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/8c/07/d8f1176328fa9e9581b6f120b017e286d2a2d22ae3f554efd9515c8e1b49/watchfiles-1.1.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7208ab6e009c627b7557ce55c465c98967e8caa8b11833531fdf95799372633", size = 449582 },
|
| 816 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/66/e8/80a14a453cf6038e81d072a86c05276692a1826471fef91df7537dba8b46/watchfiles-1.1.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a8f6f72974a19efead54195bc9bed4d850fc047bb7aa971268fd9a8387c89011", size = 456752 },
|
| 817 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/5a/25/0853b3fe0e3c2f5af9ea60eb2e781eade939760239a72c2d38fc4cc335f6/watchfiles-1.1.0-cp313-cp313t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d181ef50923c29cf0450c3cd47e2f0557b62218c50b2ab8ce2ecaa02bd97e670", size = 481436 },
|
| 818 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/fe/9e/4af0056c258b861fbb29dcb36258de1e2b857be4a9509e6298abcf31e5c9/watchfiles-1.1.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:adb4167043d3a78280d5d05ce0ba22055c266cf8655ce942f2fb881262ff3cdf", size = 596016 },
|
| 819 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/c5/fa/95d604b58aa375e781daf350897aaaa089cff59d84147e9ccff2447c8294/watchfiles-1.1.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8c5701dc474b041e2934a26d31d39f90fac8a3dee2322b39f7729867f932b1d4", size = 476727 },
|
| 820 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/65/95/fe479b2664f19be4cf5ceeb21be05afd491d95f142e72d26a42f41b7c4f8/watchfiles-1.1.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b067915e3c3936966a8607f6fe5487df0c9c4afb85226613b520890049deea20", size = 451864 },
|
| 821 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/d3/8a/3c4af14b93a15ce55901cd7a92e1a4701910f1768c78fb30f61d2b79785b/watchfiles-1.1.0-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:9c733cda03b6d636b4219625a4acb5c6ffb10803338e437fb614fef9516825ef", size = 625626 },
|
| 822 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/da/f5/cf6aa047d4d9e128f4b7cde615236a915673775ef171ff85971d698f3c2c/watchfiles-1.1.0-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:cc08ef8b90d78bfac66f0def80240b0197008e4852c9f285907377b2947ffdcb", size = 622744 },
|
| 823 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/2c/00/70f75c47f05dea6fd30df90f047765f6fc2d6eb8b5a3921379b0b04defa2/watchfiles-1.1.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:9974d2f7dc561cce3bb88dfa8eb309dab64c729de85fba32e98d75cf24b66297", size = 402114 },
|
| 824 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/53/03/acd69c48db4a1ed1de26b349d94077cca2238ff98fd64393f3e97484cae6/watchfiles-1.1.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c68e9f1fcb4d43798ad8814c4c1b61547b014b667216cb754e606bfade587018", size = 393879 },
|
| 825 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/2f/c8/a9a2a6f9c8baa4eceae5887fecd421e1b7ce86802bcfc8b6a942e2add834/watchfiles-1.1.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95ab1594377effac17110e1352989bdd7bdfca9ff0e5eeccd8c69c5389b826d0", size = 450026 },
|
| 826 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/fe/51/d572260d98388e6e2b967425c985e07d47ee6f62e6455cefb46a6e06eda5/watchfiles-1.1.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fba9b62da882c1be1280a7584ec4515d0a6006a94d6e5819730ec2eab60ffe12", size = 457917 },
|
| 827 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/c6/2d/4258e52917bf9f12909b6ec314ff9636276f3542f9d3807d143f27309104/watchfiles-1.1.0-cp314-cp314-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3434e401f3ce0ed6b42569128b3d1e3af773d7ec18751b918b89cd49c14eaafb", size = 483602 },
|
| 828 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/84/99/bee17a5f341a4345fe7b7972a475809af9e528deba056f8963d61ea49f75/watchfiles-1.1.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fa257a4d0d21fcbca5b5fcba9dca5a78011cb93c0323fb8855c6d2dfbc76eb77", size = 596758 },
|
| 829 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/40/76/e4bec1d59b25b89d2b0716b41b461ed655a9a53c60dc78ad5771fda5b3e6/watchfiles-1.1.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7fd1b3879a578a8ec2076c7961076df540b9af317123f84569f5a9ddee64ce92", size = 477601 },
|
| 830 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/1f/fa/a514292956f4a9ce3c567ec0c13cce427c158e9f272062685a8a727d08fc/watchfiles-1.1.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:62cc7a30eeb0e20ecc5f4bd113cd69dcdb745a07c68c0370cea919f373f65d9e", size = 451936 },
|
| 831 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/32/5d/c3bf927ec3bbeb4566984eba8dd7a8eb69569400f5509904545576741f88/watchfiles-1.1.0-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:891c69e027748b4a73847335d208e374ce54ca3c335907d381fde4e41661b13b", size = 626243 },
|
| 832 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/e6/65/6e12c042f1a68c556802a84d54bb06d35577c81e29fba14019562479159c/watchfiles-1.1.0-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:12fe8eaffaf0faa7906895b4f8bb88264035b3f0243275e0bf24af0436b27259", size = 623073 },
|
| 833 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/89/ab/7f79d9bf57329e7cbb0a6fd4c7bd7d0cee1e4a8ef0041459f5409da3506c/watchfiles-1.1.0-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:bfe3c517c283e484843cb2e357dd57ba009cff351edf45fb455b5fbd1f45b15f", size = 400872 },
|
| 834 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/df/d5/3f7bf9912798e9e6c516094db6b8932df53b223660c781ee37607030b6d3/watchfiles-1.1.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:a9ccbf1f129480ed3044f540c0fdbc4ee556f7175e5ab40fe077ff6baf286d4e", size = 392877 },
|
| 835 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/0d/c5/54ec7601a2798604e01c75294770dbee8150e81c6e471445d7601610b495/watchfiles-1.1.0-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba0e3255b0396cac3cc7bbace76404dd72b5438bf0d8e7cefa2f79a7f3649caa", size = 449645 },
|
| 836 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/0a/04/c2f44afc3b2fce21ca0b7802cbd37ed90a29874f96069ed30a36dfe57c2b/watchfiles-1.1.0-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4281cd9fce9fc0a9dbf0fc1217f39bf9cf2b4d315d9626ef1d4e87b84699e7e8", size = 457424 },
|
| 837 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/9f/b0/eec32cb6c14d248095261a04f290636da3df3119d4040ef91a4a50b29fa5/watchfiles-1.1.0-cp314-cp314t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6d2404af8db1329f9a3c9b79ff63e0ae7131986446901582067d9304ae8aaf7f", size = 481584 },
|
| 838 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/d1/e2/ca4bb71c68a937d7145aa25709e4f5d68eb7698a25ce266e84b55d591bbd/watchfiles-1.1.0-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e78b6ed8165996013165eeabd875c5dfc19d41b54f94b40e9fff0eb3193e5e8e", size = 596675 },
|
| 839 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/a1/dd/b0e4b7fb5acf783816bc950180a6cd7c6c1d2cf7e9372c0ea634e722712b/watchfiles-1.1.0-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:249590eb75ccc117f488e2fabd1bfa33c580e24b96f00658ad88e38844a040bb", size = 477363 },
|
| 840 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/69/c4/088825b75489cb5b6a761a4542645718893d395d8c530b38734f19da44d2/watchfiles-1.1.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d05686b5487cfa2e2c28ff1aa370ea3e6c5accfe6435944ddea1e10d93872147", size = 452240 },
|
| 841 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/10/8c/22b074814970eeef43b7c44df98c3e9667c1f7bf5b83e0ff0201b0bd43f9/watchfiles-1.1.0-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:d0e10e6f8f6dc5762adee7dece33b722282e1f59aa6a55da5d493a97282fedd8", size = 625607 },
|
| 842 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/32/fa/a4f5c2046385492b2273213ef815bf71a0d4c1943b784fb904e184e30201/watchfiles-1.1.0-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:af06c863f152005c7592df1d6a7009c836a247c9d8adb78fef8575a5a98699db", size = 623315 },
|
| 843 |
+
]
|
| 844 |
+
|
| 845 |
+
[[package]]
|
| 846 |
+
name = "websockets"
|
| 847 |
+
version = "15.0.1"
|
| 848 |
+
source = { registry = "https://mirrors.ustc.edu.cn/pypi/simple" }
|
| 849 |
+
sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/21/e6/26d09fab466b7ca9c7737474c52be4f76a40301b08362eb2dbc19dcc16c1/websockets-15.0.1.tar.gz", hash = "sha256:82544de02076bafba038ce055ee6412d68da13ab47f0c60cab827346de828dee", size = 177016 }
|
| 850 |
+
wheels = [
|
| 851 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/cb/9f/51f0cf64471a9d2b4d0fc6c534f323b664e7095640c34562f5182e5a7195/websockets-15.0.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ee443ef070bb3b6ed74514f5efaa37a252af57c90eb33b956d35c8e9c10a1931", size = 175440 },
|
| 852 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/8a/05/aa116ec9943c718905997412c5989f7ed671bc0188ee2ba89520e8765d7b/websockets-15.0.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5a939de6b7b4e18ca683218320fc67ea886038265fd1ed30173f5ce3f8e85675", size = 173098 },
|
| 853 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/ff/0b/33cef55ff24f2d92924923c99926dcce78e7bd922d649467f0eda8368923/websockets-15.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:746ee8dba912cd6fc889a8147168991d50ed70447bf18bcda7039f7d2e3d9151", size = 173329 },
|
| 854 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/31/1d/063b25dcc01faa8fada1469bdf769de3768b7044eac9d41f734fd7b6ad6d/websockets-15.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:595b6c3969023ecf9041b2936ac3827e4623bfa3ccf007575f04c5a6aa318c22", size = 183111 },
|
| 855 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/93/53/9a87ee494a51bf63e4ec9241c1ccc4f7c2f45fff85d5bde2ff74fcb68b9e/websockets-15.0.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c714d2fc58b5ca3e285461a4cc0c9a66bd0e24c5da9911e30158286c9b5be7f", size = 182054 },
|
| 856 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/ff/b2/83a6ddf56cdcbad4e3d841fcc55d6ba7d19aeb89c50f24dd7e859ec0805f/websockets-15.0.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f3c1e2ab208db911594ae5b4f79addeb3501604a165019dd221c0bdcabe4db8", size = 182496 },
|
| 857 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/98/41/e7038944ed0abf34c45aa4635ba28136f06052e08fc2168520bb8b25149f/websockets-15.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:229cf1d3ca6c1804400b0a9790dc66528e08a6a1feec0d5040e8b9eb14422375", size = 182829 },
|
| 858 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/e0/17/de15b6158680c7623c6ef0db361da965ab25d813ae54fcfeae2e5b9ef910/websockets-15.0.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:756c56e867a90fb00177d530dca4b097dd753cde348448a1012ed6c5131f8b7d", size = 182217 },
|
| 859 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/33/2b/1f168cb6041853eef0362fb9554c3824367c5560cbdaad89ac40f8c2edfc/websockets-15.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:558d023b3df0bffe50a04e710bc87742de35060580a293c2a984299ed83bc4e4", size = 182195 },
|
| 860 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/86/eb/20b6cdf273913d0ad05a6a14aed4b9a85591c18a987a3d47f20fa13dcc47/websockets-15.0.1-cp313-cp313-win32.whl", hash = "sha256:ba9e56e8ceeeedb2e080147ba85ffcd5cd0711b89576b83784d8605a7df455fa", size = 176393 },
|
| 861 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/1b/6c/c65773d6cab416a64d191d6ee8a8b1c68a09970ea6909d16965d26bfed1e/websockets-15.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:e09473f095a819042ecb2ab9465aee615bd9c2028e4ef7d933600a8401c79561", size = 176837 },
|
| 862 |
+
{ url = "https://mirrors.ustc.edu.cn/pypi/packages/fa/a8/5b41e0da817d64113292ab1f8247140aac61cbf6cfd085d6a0fa77f4984f/websockets-15.0.1-py3-none-any.whl", hash = "sha256:f7a866fbc1e97b5c617ee4116daaa09b722101d4a3c170c787450ba409f9736f", size = 169743 },
|
| 863 |
+
]
|
warp2protobuf/__init__.py
ADDED
|
@@ -0,0 +1,4 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Re-exported compatibility package for legacy src.* modules
|
| 2 |
+
# This package proxies to existing code under src to enable gradual migration.
|
| 3 |
+
|
| 4 |
+
__all__ = []
|
warp2protobuf/api/__init__.py
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# API subpackage for warp2protobuf
|
| 2 |
+
|
| 3 |
+
__all__ = []
|
warp2protobuf/api/protobuf_routes.py
ADDED
|
@@ -0,0 +1,623 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/env python3
|
| 2 |
+
# -*- coding: utf-8 -*-
|
| 3 |
+
"""
|
| 4 |
+
Protobuf编解码API路由
|
| 5 |
+
|
| 6 |
+
提供纯protobuf数据包编解码服务,包括JWT管理和WebSocket支持。
|
| 7 |
+
"""
|
| 8 |
+
import json
|
| 9 |
+
import base64
|
| 10 |
+
import asyncio
|
| 11 |
+
import httpx
|
| 12 |
+
from typing import Any, Dict, List, Optional
|
| 13 |
+
from datetime import datetime
|
| 14 |
+
|
| 15 |
+
from fastapi import FastAPI, Request, HTTPException, WebSocket, WebSocketDisconnect, Query
|
| 16 |
+
from fastapi.responses import JSONResponse
|
| 17 |
+
from fastapi.middleware.cors import CORSMiddleware
|
| 18 |
+
from pydantic import BaseModel
|
| 19 |
+
|
| 20 |
+
from ..core.logging import logger
|
| 21 |
+
from ..core.protobuf_utils import protobuf_to_dict, dict_to_protobuf_bytes
|
| 22 |
+
from ..core.auth import get_jwt_token, refresh_jwt_if_needed, is_token_expired, get_valid_jwt, acquire_anonymous_access_token
|
| 23 |
+
from ..core.stream_processor import get_stream_processor, set_websocket_manager
|
| 24 |
+
from ..config.models import get_all_unique_models
|
| 25 |
+
from ..config.settings import CLIENT_VERSION, OS_CATEGORY, OS_NAME, OS_VERSION, WARP_URL as CONFIG_WARP_URL
|
| 26 |
+
from ..core.server_message_data import decode_server_message_data, encode_server_message_data
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
def _encode_smd_inplace(obj: Any) -> Any:
|
| 30 |
+
if isinstance(obj, dict):
|
| 31 |
+
new_d = {}
|
| 32 |
+
for k, v in obj.items():
|
| 33 |
+
if k in ("server_message_data", "serverMessageData") and isinstance(v, dict):
|
| 34 |
+
try:
|
| 35 |
+
b64 = encode_server_message_data(
|
| 36 |
+
uuid=v.get("uuid"),
|
| 37 |
+
seconds=v.get("seconds"),
|
| 38 |
+
nanos=v.get("nanos"),
|
| 39 |
+
)
|
| 40 |
+
new_d[k] = b64
|
| 41 |
+
except Exception:
|
| 42 |
+
new_d[k] = v
|
| 43 |
+
else:
|
| 44 |
+
new_d[k] = _encode_smd_inplace(v)
|
| 45 |
+
return new_d
|
| 46 |
+
elif isinstance(obj, list):
|
| 47 |
+
return [_encode_smd_inplace(x) for x in obj]
|
| 48 |
+
else:
|
| 49 |
+
return obj
|
| 50 |
+
|
| 51 |
+
|
| 52 |
+
def _decode_smd_inplace(obj: Any) -> Any:
|
| 53 |
+
if isinstance(obj, dict):
|
| 54 |
+
new_d = {}
|
| 55 |
+
for k, v in obj.items():
|
| 56 |
+
if k in ("server_message_data", "serverMessageData") and isinstance(v, str):
|
| 57 |
+
try:
|
| 58 |
+
dec = decode_server_message_data(v)
|
| 59 |
+
new_d[k] = dec
|
| 60 |
+
except Exception:
|
| 61 |
+
new_d[k] = v
|
| 62 |
+
else:
|
| 63 |
+
new_d[k] = _decode_smd_inplace(v)
|
| 64 |
+
return new_d
|
| 65 |
+
elif isinstance(obj, list):
|
| 66 |
+
return [_decode_smd_inplace(x) for x in obj]
|
| 67 |
+
else:
|
| 68 |
+
return obj
|
| 69 |
+
from ..core.schema_sanitizer import sanitize_mcp_input_schema_in_packet
|
| 70 |
+
|
| 71 |
+
|
| 72 |
+
class EncodeRequest(BaseModel):
    """Request payload for encoding JSON into a protobuf message.

    Clients may either supply the full packet via ``json_data`` or provide the
    individual top-level fields; :meth:`get_data` assembles the effective
    packet in both cases.  Extra fields are permitted (``extra = "allow"``)
    and merged into the packet verbatim.
    """
    json_data: Optional[Dict[str, Any]] = None
    message_type: str = "warp.multi_agent.v1.Request"

    task_context: Optional[Dict[str, Any]] = None
    input: Optional[Dict[str, Any]] = None
    settings: Optional[Dict[str, Any]] = None
    metadata: Optional[Dict[str, Any]] = None
    mcp_context: Optional[Dict[str, Any]] = None
    existing_suggestions: Optional[Dict[str, Any]] = None
    client_version: Optional[str] = None
    os_category: Optional[str] = None
    os_name: Optional[str] = None
    os_version: Optional[str] = None

    class Config:
        # Accept unknown fields so arbitrary packet keys can be forwarded.
        extra = "allow"

    def get_data(self) -> Dict[str, Any]:
        """Return the packet to encode.

        ``json_data`` wins when present; otherwise the non-``None`` named
        fields are collected in declaration order and any extra
        (model-unknown) attributes are merged in afterwards.
        """
        if self.json_data is not None:
            return self.json_data

        data: Dict[str, Any] = {}
        for field_name in (
            "task_context", "input", "settings", "metadata", "mcp_context",
            "existing_suggestions", "client_version", "os_category", "os_name", "os_version",
        ):
            value = getattr(self, field_name)
            if value is not None:
                data[field_name] = value

        known_keys = {
            "json_data", "message_type", "task_context", "input", "settings", "metadata",
            "mcp_context", "existing_suggestions", "client_version", "os_category", "os_name", "os_version"
        }
        # Merge extra attributes (allowed by Config.extra); best effort only.
        try:
            for key, value in self.__dict__.items():
                if value is None or key in known_keys or key in data:
                    continue
                data[key] = value
        except Exception:
            pass
        return data
|
| 131 |
+
|
| 132 |
+
|
| 133 |
+
class DecodeRequest(BaseModel):
    """Request payload for decoding protobuf bytes back into JSON."""
    # Base64-encoded protobuf message bytes.
    protobuf_bytes: str
    # Fully-qualified protobuf message type to decode as.
    message_type: str = "warp.multi_agent.v1.Request"
|
| 136 |
+
|
| 137 |
+
|
| 138 |
+
class StreamDecodeRequest(BaseModel):
    """Request payload for decoding a sequence of streamed protobuf chunks."""
    # Base64-encoded protobuf chunks, in arrival order.
    protobuf_chunks: List[str]
    # Fully-qualified protobuf message type each chunk decodes as.
    message_type: str = "warp.multi_agent.v1.Response"
|
| 141 |
+
|
| 142 |
+
|
| 143 |
+
class ConnectionManager:
    """Tracks live WebSocket clients and a rolling history of captured packets."""

    def __init__(self):
        # Currently-open WebSocket connections.
        self.active_connections: List[WebSocket] = []
        # Rolling buffer of the most recent captured packets (capped at 100).
        self.packet_history: List[Dict] = []

    async def connect(self, websocket: WebSocket):
        """Accept an incoming connection and start tracking it."""
        await websocket.accept()
        self.active_connections.append(websocket)
        logger.info(f"WebSocket连接建立,当前连接数: {len(self.active_connections)}")

    def disconnect(self, websocket: WebSocket):
        """Stop tracking a connection; a no-op if it is not tracked."""
        if websocket in self.active_connections:
            self.active_connections.remove(websocket)
            logger.info(f"WebSocket连接断开,当前连接数: {len(self.active_connections)}")

    async def broadcast(self, message: Dict):
        """Send *message* to every live connection, pruning any that fail."""
        if not self.active_connections:
            return
        failed: List[WebSocket] = []
        for ws in self.active_connections:
            try:
                await ws.send_json(message)
            except Exception as exc:
                logger.warning(f"发送WebSocket消息失败: {exc}")
                failed.append(ws)
        for ws in failed:
            self.disconnect(ws)

    async def log_packet(self, packet_type: str, data: Dict, size: int):
        """Record a packet in the history and broadcast it to listeners."""
        rendered = str(data)  # stringify once for both preview and length check
        packet_info = {
            "timestamp": datetime.now().isoformat(),
            "type": packet_type,
            "size": size,
            "data_preview": rendered[:200] + "..." if len(rendered) > 200 else rendered,
            "full_data": data,
        }
        self.packet_history.append(packet_info)
        # Keep only the most recent 100 packets.
        if len(self.packet_history) > 100:
            self.packet_history = self.packet_history[-100:]
        await self.broadcast({"event": "packet_captured", "packet": packet_info})
|
| 186 |
+
|
| 187 |
+
|
| 188 |
+
# Singleton manager shared with the stream processor so captured packets are
# forwarded to any attached WebSocket debug clients.
manager = ConnectionManager()
set_websocket_manager(manager)

app = FastAPI(title="Warp Protobuf编解码服务器", version="1.0.0")
# Permissive CORS: this service acts as a local development/debugging proxy.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
|
| 199 |
+
|
| 200 |
+
|
| 201 |
+
@app.get("/")
|
| 202 |
+
async def root():
|
| 203 |
+
return {"message": "Warp Protobuf编解码服务器", "version": "1.0.0"}
|
| 204 |
+
|
| 205 |
+
|
| 206 |
+
@app.get("/healthz")
|
| 207 |
+
async def health_check():
|
| 208 |
+
return {"status": "ok", "timestamp": datetime.now().isoformat()}
|
| 209 |
+
|
| 210 |
+
|
| 211 |
+
@app.post("/api/encode")
|
| 212 |
+
async def encode_json_to_protobuf(request: EncodeRequest):
|
| 213 |
+
try:
|
| 214 |
+
logger.info(f"收到编码请求,消息类型: {request.message_type}")
|
| 215 |
+
actual_data = request.get_data()
|
| 216 |
+
if not actual_data:
|
| 217 |
+
raise HTTPException(400, "数据包不能为空")
|
| 218 |
+
wrapped = {"json_data": actual_data}
|
| 219 |
+
wrapped = sanitize_mcp_input_schema_in_packet(wrapped)
|
| 220 |
+
actual_data = wrapped.get("json_data", actual_data)
|
| 221 |
+
actual_data = _encode_smd_inplace(actual_data)
|
| 222 |
+
protobuf_bytes = dict_to_protobuf_bytes(actual_data, request.message_type)
|
| 223 |
+
try:
|
| 224 |
+
await manager.log_packet("encode", actual_data, len(protobuf_bytes))
|
| 225 |
+
except Exception as log_error:
|
| 226 |
+
logger.warning(f"数据包记录失败: {log_error}")
|
| 227 |
+
result = {
|
| 228 |
+
"protobuf_bytes": base64.b64encode(protobuf_bytes).decode('utf-8'),
|
| 229 |
+
"size": len(protobuf_bytes),
|
| 230 |
+
"message_type": request.message_type
|
| 231 |
+
}
|
| 232 |
+
logger.info(f"✅ JSON编码为protobuf成功: {len(protobuf_bytes)} 字节")
|
| 233 |
+
return result
|
| 234 |
+
except HTTPException:
|
| 235 |
+
raise
|
| 236 |
+
except Exception as e:
|
| 237 |
+
logger.error(f"❌ JSON编码失败: {e}")
|
| 238 |
+
raise HTTPException(500, f"编码失败: {str(e)}")
|
| 239 |
+
|
| 240 |
+
|
| 241 |
+
@app.post("/api/decode")
|
| 242 |
+
async def decode_protobuf_to_json(request: DecodeRequest):
|
| 243 |
+
try:
|
| 244 |
+
logger.info(f"收到解码请求,消息类型: {request.message_type}")
|
| 245 |
+
if not request.protobuf_bytes or not request.protobuf_bytes.strip():
|
| 246 |
+
raise HTTPException(400, "Protobuf数据不能为空")
|
| 247 |
+
try:
|
| 248 |
+
protobuf_bytes = base64.b64decode(request.protobuf_bytes)
|
| 249 |
+
except Exception as decode_error:
|
| 250 |
+
logger.error(f"Base64解码失败: {decode_error}")
|
| 251 |
+
raise HTTPException(400, f"Base64解码失败: {str(decode_error)}")
|
| 252 |
+
if not protobuf_bytes:
|
| 253 |
+
raise HTTPException(400, "解码后的protobuf数据为空")
|
| 254 |
+
json_data = protobuf_to_dict(protobuf_bytes, request.message_type)
|
| 255 |
+
try:
|
| 256 |
+
await manager.log_packet("decode", json_data, len(protobuf_bytes))
|
| 257 |
+
except Exception as log_error:
|
| 258 |
+
logger.warning(f"数据包记录失败: {log_error}")
|
| 259 |
+
result = {"json_data": json_data, "size": len(protobuf_bytes), "message_type": request.message_type}
|
| 260 |
+
logger.info(f"✅ Protobuf解码为JSON成功: {len(protobuf_bytes)} 字节")
|
| 261 |
+
return result
|
| 262 |
+
except HTTPException:
|
| 263 |
+
raise
|
| 264 |
+
except Exception as e:
|
| 265 |
+
logger.error(f"❌ Protobuf解码失败: {e}")
|
| 266 |
+
raise HTTPException(500, f"解码失败: {e}")
|
| 267 |
+
|
| 268 |
+
|
| 269 |
+
@app.post("/api/stream-decode")
|
| 270 |
+
async def decode_stream_protobuf(request: StreamDecodeRequest):
|
| 271 |
+
try:
|
| 272 |
+
logger.info(f"收到流式解码请求,数据块数量: {len(request.protobuf_chunks)}")
|
| 273 |
+
results = []
|
| 274 |
+
total_size = 0
|
| 275 |
+
for i, chunk_b64 in enumerate(request.protobuf_chunks):
|
| 276 |
+
try:
|
| 277 |
+
chunk_bytes = base64.b64decode(chunk_b64)
|
| 278 |
+
chunk_json = protobuf_to_dict(chunk_bytes, request.message_type)
|
| 279 |
+
chunk_result = {"chunk_index": i, "json_data": chunk_json, "size": len(chunk_bytes)}
|
| 280 |
+
results.append(chunk_result)
|
| 281 |
+
total_size += len(chunk_bytes)
|
| 282 |
+
await manager.log_packet(f"stream_decode_chunk_{i}", chunk_json, len(chunk_bytes))
|
| 283 |
+
except Exception as e:
|
| 284 |
+
logger.warning(f"数据块 {i} 解码失败: {e}")
|
| 285 |
+
results.append({"chunk_index": i, "error": str(e), "size": 0})
|
| 286 |
+
try:
|
| 287 |
+
all_bytes = b''.join([base64.b64decode(chunk) for chunk in request.protobuf_chunks])
|
| 288 |
+
complete_json = protobuf_to_dict(all_bytes, request.message_type)
|
| 289 |
+
await manager.log_packet("stream_decode_complete", complete_json, len(all_bytes))
|
| 290 |
+
complete_result = {"json_data": complete_json, "size": len(all_bytes)}
|
| 291 |
+
except Exception as e:
|
| 292 |
+
complete_result = {"error": f"无法拼接完整消息: {e}", "size": total_size}
|
| 293 |
+
result = {"chunks": results, "complete": complete_result, "total_chunks": len(request.protobuf_chunks), "total_size": total_size, "message_type": request.message_type}
|
| 294 |
+
logger.info(f"✅ 流式protobuf解码完成: {len(request.protobuf_chunks)} 块,总大小 {total_size} 字节")
|
| 295 |
+
return result
|
| 296 |
+
except Exception as e:
|
| 297 |
+
logger.error(f"❌ 流式protobuf解码失败: {e}")
|
| 298 |
+
raise HTTPException(500, f"流式解码失败: {e}")
|
| 299 |
+
|
| 300 |
+
|
| 301 |
+
@app.get("/api/schemas")
|
| 302 |
+
async def get_protobuf_schemas():
|
| 303 |
+
try:
|
| 304 |
+
from ..core.protobuf import ensure_proto_runtime, ALL_MSGS, msg_cls
|
| 305 |
+
ensure_proto_runtime()
|
| 306 |
+
schemas = []
|
| 307 |
+
for msg_name in ALL_MSGS:
|
| 308 |
+
try:
|
| 309 |
+
MessageClass = msg_cls(msg_name)
|
| 310 |
+
descriptor = MessageClass.DESCRIPTOR
|
| 311 |
+
fields = []
|
| 312 |
+
for field in descriptor.fields:
|
| 313 |
+
fields.append({"name": field.name, "type": field.type, "label": getattr(field, 'label', None), "number": field.number})
|
| 314 |
+
schemas.append({"name": msg_name, "full_name": descriptor.full_name, "field_count": len(fields), "fields": fields[:10]})
|
| 315 |
+
except Exception as e:
|
| 316 |
+
logger.warning(f"获取schema {msg_name} 信息失败: {e}")
|
| 317 |
+
result = {"schemas": schemas, "total_count": len(schemas), "message": f"找到 {len(schemas)} 个protobuf消息类型"}
|
| 318 |
+
logger.info(f"✅ 返回 {len(schemas)} 个protobuf schema")
|
| 319 |
+
return result
|
| 320 |
+
except Exception as e:
|
| 321 |
+
logger.error(f"❌ 获取protobuf schemas失败: {e}")
|
| 322 |
+
raise HTTPException(500, f"获取schemas失败: {e}")
|
| 323 |
+
|
| 324 |
+
|
| 325 |
+
@app.get("/api/auth/status")
|
| 326 |
+
async def get_auth_status():
|
| 327 |
+
try:
|
| 328 |
+
jwt_token = get_jwt_token()
|
| 329 |
+
if not jwt_token:
|
| 330 |
+
return {"authenticated": False, "message": "未找到JWT token", "suggestion": "运行 'uv run refresh_jwt.py' 获取token"}
|
| 331 |
+
is_expired = is_token_expired(jwt_token)
|
| 332 |
+
result = {"authenticated": not is_expired, "token_present": True, "token_expired": is_expired, "token_preview": f"{jwt_token[:20]}...{jwt_token[-10:]}", "message": "Token有效" if not is_expired else "Token已过期"}
|
| 333 |
+
if is_expired:
|
| 334 |
+
result["suggestion"] = "运行 'uv run refresh_jwt.py' 刷新token"
|
| 335 |
+
return result
|
| 336 |
+
except Exception as e:
|
| 337 |
+
logger.error(f"❌ 获取认证状态失败: {e}")
|
| 338 |
+
raise HTTPException(500, f"获取认证状态失败: {e}")
|
| 339 |
+
|
| 340 |
+
|
| 341 |
+
@app.post("/api/auth/refresh")
|
| 342 |
+
async def refresh_auth_token():
|
| 343 |
+
try:
|
| 344 |
+
success = await refresh_jwt_if_needed()
|
| 345 |
+
if success:
|
| 346 |
+
return {"success": True, "message": "JWT token刷新成功", "timestamp": datetime.now().isoformat()}
|
| 347 |
+
else:
|
| 348 |
+
return {"success": False, "message": "JWT token刷新失败", "suggestion": "检查网络连接或手动运行 'uv run refresh_jwt.py'"}
|
| 349 |
+
except Exception as e:
|
| 350 |
+
logger.error(f"❌ 刷新JWT token失败: {e}")
|
| 351 |
+
raise HTTPException(500, f"刷新token失败: {e}")
|
| 352 |
+
|
| 353 |
+
|
| 354 |
+
@app.get("/api/auth/user_id")
|
| 355 |
+
async def get_user_id_endpoint():
|
| 356 |
+
try:
|
| 357 |
+
from ..core.auth import get_user_id
|
| 358 |
+
user_id = get_user_id()
|
| 359 |
+
if user_id:
|
| 360 |
+
return {"success": True, "user_id": user_id, "message": "User ID获取成功"}
|
| 361 |
+
else:
|
| 362 |
+
return {"success": False, "user_id": "", "message": "未找到User ID,可能需要刷新JWT token"}
|
| 363 |
+
except Exception as e:
|
| 364 |
+
logger.error(f"❌ 获取User ID失败: {e}")
|
| 365 |
+
raise HTTPException(500, f"获取User ID失败: {e}")
|
| 366 |
+
|
| 367 |
+
|
| 368 |
+
@app.get("/api/packets/history")
|
| 369 |
+
async def get_packet_history(limit: int = 50):
|
| 370 |
+
try:
|
| 371 |
+
history = manager.packet_history[-limit:] if len(manager.packet_history) > limit else manager.packet_history
|
| 372 |
+
return {"packets": history, "total_count": len(manager.packet_history), "returned_count": len(history)}
|
| 373 |
+
except Exception as e:
|
| 374 |
+
logger.error(f"❌ 获取数据包历史失败: {e}")
|
| 375 |
+
raise HTTPException(500, f"获取历史记录失败: {e}")
|
| 376 |
+
|
| 377 |
+
|
| 378 |
+
@app.post("/api/warp/send")
|
| 379 |
+
async def send_to_warp_api(
|
| 380 |
+
request: EncodeRequest,
|
| 381 |
+
show_all_events: bool = Query(True, description="Show detailed SSE event breakdown")
|
| 382 |
+
):
|
| 383 |
+
try:
|
| 384 |
+
logger.info(f"收到Warp API发送请求,消息类型: {request.message_type}")
|
| 385 |
+
actual_data = request.get_data()
|
| 386 |
+
if not actual_data:
|
| 387 |
+
raise HTTPException(400, "数据包不能为空")
|
| 388 |
+
wrapped = {"json_data": actual_data}
|
| 389 |
+
wrapped = sanitize_mcp_input_schema_in_packet(wrapped)
|
| 390 |
+
actual_data = wrapped.get("json_data", actual_data)
|
| 391 |
+
actual_data = _encode_smd_inplace(actual_data)
|
| 392 |
+
protobuf_bytes = dict_to_protobuf_bytes(actual_data, request.message_type)
|
| 393 |
+
logger.info(f"✅ JSON编码为protobuf成功: {len(protobuf_bytes)} 字节")
|
| 394 |
+
from ..warp.api_client import send_protobuf_to_warp_api
|
| 395 |
+
response_text, conversation_id, task_id = await send_protobuf_to_warp_api(protobuf_bytes, show_all_events=show_all_events)
|
| 396 |
+
await manager.log_packet("warp_request", actual_data, len(protobuf_bytes))
|
| 397 |
+
await manager.log_packet("warp_response", {"response": response_text, "conversation_id": conversation_id, "task_id": task_id}, len(response_text.encode()))
|
| 398 |
+
result = {"response": response_text, "conversation_id": conversation_id, "task_id": task_id, "request_size": len(protobuf_bytes), "response_size": len(response_text), "message_type": request.message_type}
|
| 399 |
+
logger.info(f"✅ Warp API调用成功,响应长度: {len(response_text)} 字符")
|
| 400 |
+
return result
|
| 401 |
+
except Exception as e:
|
| 402 |
+
import traceback
|
| 403 |
+
error_details = {"error": str(e), "error_type": type(e).__name__, "traceback": traceback.format_exc(), "request_info": {"message_type": request.message_type, "json_size": len(str(actual_data)), "has_tools": "mcp_context" in actual_data, "has_history": "task_context" in actual_data}}
|
| 404 |
+
logger.error(f"❌ Warp API调用失败: {e}")
|
| 405 |
+
logger.error(f"错误详情: {error_details}")
|
| 406 |
+
try:
|
| 407 |
+
await manager.log_packet("warp_error", error_details, 0)
|
| 408 |
+
except Exception as log_error:
|
| 409 |
+
logger.warning(f"记录错误失败: {log_error}")
|
| 410 |
+
raise HTTPException(500, detail=error_details)
|
| 411 |
+
|
| 412 |
+
|
| 413 |
+
@app.post("/api/warp/send_stream")
|
| 414 |
+
async def send_to_warp_api_parsed(
|
| 415 |
+
request: EncodeRequest
|
| 416 |
+
):
|
| 417 |
+
try:
|
| 418 |
+
logger.info(f"收到Warp API解析发送请求,消息类型: {request.message_type}")
|
| 419 |
+
actual_data = request.get_data()
|
| 420 |
+
if not actual_data:
|
| 421 |
+
raise HTTPException(400, "数据包不能为空")
|
| 422 |
+
wrapped = {"json_data": actual_data}
|
| 423 |
+
wrapped = sanitize_mcp_input_schema_in_packet(wrapped)
|
| 424 |
+
actual_data = wrapped.get("json_data", actual_data)
|
| 425 |
+
actual_data = _encode_smd_inplace(actual_data)
|
| 426 |
+
protobuf_bytes = dict_to_protobuf_bytes(actual_data, request.message_type)
|
| 427 |
+
logger.info(f"✅ JSON编码为protobuf成功: {len(protobuf_bytes)} 字节")
|
| 428 |
+
from ..warp.api_client import send_protobuf_to_warp_api_parsed
|
| 429 |
+
response_text, conversation_id, task_id, parsed_events = await send_protobuf_to_warp_api_parsed(protobuf_bytes)
|
| 430 |
+
parsed_events = _decode_smd_inplace(parsed_events)
|
| 431 |
+
await manager.log_packet("warp_request_parsed", actual_data, len(protobuf_bytes))
|
| 432 |
+
response_data = {"response": response_text, "conversation_id": conversation_id, "task_id": task_id, "parsed_events": parsed_events}
|
| 433 |
+
await manager.log_packet("warp_response_parsed", response_data, len(str(response_data)))
|
| 434 |
+
result = {"response": response_text, "conversation_id": conversation_id, "task_id": task_id, "request_size": len(protobuf_bytes), "response_size": len(response_text), "message_type": request.message_type, "parsed_events": parsed_events, "events_count": len(parsed_events), "events_summary": {}}
|
| 435 |
+
if parsed_events:
|
| 436 |
+
event_type_counts = {}
|
| 437 |
+
for event in parsed_events:
|
| 438 |
+
event_type = event.get("event_type", "UNKNOWN")
|
| 439 |
+
event_type_counts[event_type] = event_type_counts.get(event_type, 0) + 1
|
| 440 |
+
result["events_summary"] = event_type_counts
|
| 441 |
+
logger.info(f"✅ Warp API解析调用成功,响应长度: {len(response_text)} 字符,事件数量: {len(parsed_events)}")
|
| 442 |
+
return result
|
| 443 |
+
except Exception as e:
|
| 444 |
+
import traceback
|
| 445 |
+
error_details = {"error": str(e), "error_type": type(e).__name__, "traceback": traceback.format_exc(), "request_info": {"message_type": request.message_type, "json_size": len(str(actual_data)) if 'actual_data' in locals() else 0, "has_tools": "mcp_context" in (actual_data or {}), "has_history": "task_context" in (actual_data or {})}}
|
| 446 |
+
logger.error(f"❌ Warp API解析调用失败: {e}")
|
| 447 |
+
logger.error(f"错误详情: {error_details}")
|
| 448 |
+
try:
|
| 449 |
+
await manager.log_packet("warp_error_parsed", error_details, 0)
|
| 450 |
+
except Exception as log_error:
|
| 451 |
+
logger.warning(f"记录错误失败: {log_error}")
|
| 452 |
+
raise HTTPException(500, detail=error_details)
|
| 453 |
+
|
| 454 |
+
|
| 455 |
+
@app.post("/api/warp/send_stream_sse")
|
| 456 |
+
async def send_to_warp_api_stream_sse(request: EncodeRequest):
|
| 457 |
+
from fastapi.responses import StreamingResponse
|
| 458 |
+
import os as _os
|
| 459 |
+
import re as _re
|
| 460 |
+
try:
|
| 461 |
+
actual_data = request.get_data()
|
| 462 |
+
if not actual_data:
|
| 463 |
+
raise HTTPException(400, "数据包不能为空")
|
| 464 |
+
wrapped = {"json_data": actual_data}
|
| 465 |
+
wrapped = sanitize_mcp_input_schema_in_packet(wrapped)
|
| 466 |
+
actual_data = wrapped.get("json_data", actual_data)
|
| 467 |
+
actual_data = _encode_smd_inplace(actual_data)
|
| 468 |
+
protobuf_bytes = dict_to_protobuf_bytes(actual_data, request.message_type)
|
| 469 |
+
async def _agen():
|
| 470 |
+
warp_url = CONFIG_WARP_URL
|
| 471 |
+
def _parse_payload_bytes(data_str: str):
|
| 472 |
+
s = _re.sub(r"\s+", "", data_str or "")
|
| 473 |
+
if not s:
|
| 474 |
+
return None
|
| 475 |
+
if _re.fullmatch(r"[0-9a-fA-F]+", s or ""):
|
| 476 |
+
try:
|
| 477 |
+
return bytes.fromhex(s)
|
| 478 |
+
except Exception:
|
| 479 |
+
pass
|
| 480 |
+
pad = "=" * ((4 - (len(s) % 4)) % 4)
|
| 481 |
+
try:
|
| 482 |
+
import base64 as _b64
|
| 483 |
+
return _b64.urlsafe_b64decode(s + pad)
|
| 484 |
+
except Exception:
|
| 485 |
+
try:
|
| 486 |
+
return _b64.b64decode(s + pad)
|
| 487 |
+
except Exception:
|
| 488 |
+
return None
|
| 489 |
+
verify_opt = True
|
| 490 |
+
insecure_env = _os.getenv("WARP_INSECURE_TLS", "").lower()
|
| 491 |
+
if insecure_env in ("1", "true", "yes"):
|
| 492 |
+
verify_opt = False
|
| 493 |
+
logger.warning("TLS verification disabled via WARP_INSECURE_TLS for Warp API stream endpoint")
|
| 494 |
+
async with httpx.AsyncClient(http2=True, timeout=httpx.Timeout(60.0), verify=verify_opt, trust_env=True) as client:
|
| 495 |
+
# 最多尝试两次:第一次失败且为配额429时申请匿名token并重试一次
|
| 496 |
+
jwt = None
|
| 497 |
+
for attempt in range(2):
|
| 498 |
+
if attempt == 0 or jwt is None:
|
| 499 |
+
jwt = await get_valid_jwt()
|
| 500 |
+
headers = {
|
| 501 |
+
"accept": "text/event-stream",
|
| 502 |
+
"content-type": "application/x-protobuf",
|
| 503 |
+
"x-warp-client-version": CLIENT_VERSION,
|
| 504 |
+
"x-warp-os-category": OS_CATEGORY,
|
| 505 |
+
"x-warp-os-name": OS_NAME,
|
| 506 |
+
"x-warp-os-version": OS_VERSION,
|
| 507 |
+
"authorization": f"Bearer {jwt}",
|
| 508 |
+
"content-length": str(len(protobuf_bytes)),
|
| 509 |
+
}
|
| 510 |
+
async with client.stream("POST", warp_url, headers=headers, content=protobuf_bytes) as response:
|
| 511 |
+
if response.status_code != 200:
|
| 512 |
+
error_text = await response.aread()
|
| 513 |
+
error_content = error_text.decode("utf-8") if error_text else ""
|
| 514 |
+
# 429 且包含配额信息时,申请匿名token后重试一次
|
| 515 |
+
if response.status_code == 429 and attempt == 0 and (
|
| 516 |
+
("No remaining quota" in error_content) or ("No AI requests remaining" in error_content)
|
| 517 |
+
):
|
| 518 |
+
logger.warning("Warp API 返回 429 (配额用尽, SSE 代理)。尝试申请匿名token并重试一次…")
|
| 519 |
+
try:
|
| 520 |
+
new_jwt = await acquire_anonymous_access_token()
|
| 521 |
+
except Exception:
|
| 522 |
+
new_jwt = None
|
| 523 |
+
if new_jwt:
|
| 524 |
+
jwt = new_jwt
|
| 525 |
+
# 重试
|
| 526 |
+
continue
|
| 527 |
+
logger.error(f"Warp API HTTP error {response.status_code}: {error_content[:300]}")
|
| 528 |
+
yield f"data: {{\"error\": \"HTTP {response.status_code}\"}}\n\n"
|
| 529 |
+
yield "data: [DONE]\n\n"
|
| 530 |
+
return
|
| 531 |
+
try:
|
| 532 |
+
logger.info(f"✅ Warp API SSE连接已建立: {warp_url}")
|
| 533 |
+
logger.info(f"📦 请求字节数: {len(protobuf_bytes)}")
|
| 534 |
+
except Exception:
|
| 535 |
+
pass
|
| 536 |
+
current_data = ""
|
| 537 |
+
event_no = 0
|
| 538 |
+
async for line in response.aiter_lines():
|
| 539 |
+
if line.startswith("data:"):
|
| 540 |
+
payload = line[5:].strip()
|
| 541 |
+
if not payload:
|
| 542 |
+
continue
|
| 543 |
+
if payload == "[DONE]":
|
| 544 |
+
break
|
| 545 |
+
current_data += payload
|
| 546 |
+
continue
|
| 547 |
+
if (line.strip() == "") and current_data:
|
| 548 |
+
raw_bytes = _parse_payload_bytes(current_data)
|
| 549 |
+
current_data = ""
|
| 550 |
+
if raw_bytes is None:
|
| 551 |
+
continue
|
| 552 |
+
try:
|
| 553 |
+
event_data = protobuf_to_dict(raw_bytes, "warp.multi_agent.v1.ResponseEvent")
|
| 554 |
+
except Exception:
|
| 555 |
+
continue
|
| 556 |
+
def _get(d: Dict[str, Any], *names: str) -> Any:
|
| 557 |
+
for n in names:
|
| 558 |
+
if isinstance(d, dict) and n in d:
|
| 559 |
+
return d[n]
|
| 560 |
+
return None
|
| 561 |
+
event_type = "UNKNOWN_EVENT"
|
| 562 |
+
if isinstance(event_data, dict):
|
| 563 |
+
if "init" in event_data:
|
| 564 |
+
event_type = "INITIALIZATION"
|
| 565 |
+
else:
|
| 566 |
+
client_actions = _get(event_data, "client_actions", "clientActions")
|
| 567 |
+
if isinstance(client_actions, dict):
|
| 568 |
+
actions = _get(client_actions, "actions", "Actions") or []
|
| 569 |
+
event_type = f"CLIENT_ACTIONS({len(actions)})" if actions else "CLIENT_ACTIONS_EMPTY"
|
| 570 |
+
elif "finished" in event_data:
|
| 571 |
+
event_type = "FINISHED"
|
| 572 |
+
event_no += 1
|
| 573 |
+
try:
|
| 574 |
+
logger.info(f"🔄 SSE Event #{event_no}: {event_type}")
|
| 575 |
+
except Exception:
|
| 576 |
+
pass
|
| 577 |
+
out = {"event_number": event_no, "event_type": event_type, "parsed_data": event_data}
|
| 578 |
+
try:
|
| 579 |
+
chunk = json.dumps(out, ensure_ascii=False)
|
| 580 |
+
except Exception:
|
| 581 |
+
continue
|
| 582 |
+
yield f"data: {chunk}\n\n"
|
| 583 |
+
try:
|
| 584 |
+
logger.info("="*60)
|
| 585 |
+
logger.info("📊 SSE STREAM SUMMARY (代理)")
|
| 586 |
+
logger.info("="*60)
|
| 587 |
+
logger.info(f"📈 Total Events Forwarded: {event_no}")
|
| 588 |
+
logger.info("="*60)
|
| 589 |
+
except Exception:
|
| 590 |
+
pass
|
| 591 |
+
yield "data: [DONE]\n\n"
|
| 592 |
+
return
|
| 593 |
+
return StreamingResponse(_agen(), media_type="text/event-stream", headers={"Cache-Control": "no-cache", "Connection": "keep-alive"})
|
| 594 |
+
except HTTPException:
|
| 595 |
+
raise
|
| 596 |
+
except Exception as e:
|
| 597 |
+
import traceback
|
| 598 |
+
error_details = {"error": str(e), "error_type": type(e).__name__, "traceback": traceback.format_exc()}
|
| 599 |
+
logger.error(f"Warp SSE转发端点错误: {e}")
|
| 600 |
+
raise HTTPException(500, detail=error_details)
|
| 601 |
+
|
| 602 |
+
|
| 603 |
+
@app.websocket("/ws")
async def websocket_endpoint(websocket: WebSocket):
    """Live packet-monitoring WebSocket endpoint.

    On connect: registers the socket with the connection manager, sends a
    "connected" greeting, then replays the last 10 packets from the manager's
    history. Afterwards it loops reading (and merely debug-logging) incoming
    text frames until the client disconnects.
    """
    await manager.connect(websocket)
    try:
        await websocket.send_json({"event": "connected", "message": "WebSocket连接已建立", "timestamp": datetime.now().isoformat()})
        # Replay a short tail of packet history so a newly attached client
        # has immediate context.
        recent_packets = manager.packet_history[-10:]
        for packet in recent_packets:
            await websocket.send_json({"event": "packet_history", "packet": packet})
        # Keep the connection open; inbound messages are informational only.
        while True:
            data = await websocket.receive_text()
            # Fixed mojibake in the original log string ("收���…" -> "收到…").
            logger.debug(f"收到WebSocket消息: {data}")
    except WebSocketDisconnect:
        manager.disconnect(websocket)
    except Exception as e:
        logger.error(f"WebSocket错误: {e}")
        manager.disconnect(websocket)
|
| 619 |
+
|
| 620 |
+
|
| 621 |
+
# Script entry point: run the FastAPI app directly with uvicorn when this
# module is executed (as opposed to being imported by a server launcher).
if __name__ == "__main__":
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=8000)
|
warp2protobuf/config/__init__.py
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Re-export common config modules
|
| 2 |
+
from .settings import * # noqa: F401,F403
|
| 3 |
+
from .models import * # noqa: F401,F403
|
warp2protobuf/config/models.py
ADDED
|
@@ -0,0 +1,328 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/env python3
|
| 2 |
+
# -*- coding: utf-8 -*-
|
| 3 |
+
"""
|
| 4 |
+
Model configuration and catalog for Warp API
|
| 5 |
+
|
| 6 |
+
Contains model definitions, configurations, and OpenAI compatibility mappings.
|
| 7 |
+
"""
|
| 8 |
+
import time
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
def get_model_config(model_name: str) -> dict:
    """Resolve a requested model name into a Warp model configuration.

    Every configuration follows the same fixed pattern: the base model is the
    request's model when recognised (otherwise "auto"), planning is always
    "o3" and coding is always "auto".
    """
    # Models that may be used directly as the base model.
    supported = frozenset({
        "claude-4-sonnet", "claude-4-opus", "claude-4.1-opus",
        "gpt-5", "gpt-4o", "gpt-4.1", "o3", "o4-mini",
        "gemini-2.5-pro", "warp-basic",
    })

    normalized = model_name.lower().strip()

    return {
        "base": normalized if normalized in supported else "auto",
        "planning": "o3",
        "coding": "auto",
    }
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
def get_warp_models():
    """Return the catalog of Warp AI models, grouped by usage category.

    The catalog has three categories ("agent_mode", "planning", "coding"),
    each with a default model id and a list of model entries sharing a
    common shape.
    """

    def _entry(model_id, display_name, category, description=None, vision=True):
        # All catalog entries share the same shape; only a handful differ in
        # description or vision support, so those are keyword overrides.
        return {
            "id": model_id,
            "display_name": display_name,
            "description": description,
            "vision_supported": vision,
            "usage_multiplier": 1,
            "category": category,
        }

    agent_models = [
        _entry("auto", "auto", "agent", description="claude 4 sonnet"),
        _entry("warp-basic", "lite", "agent", description="basic model"),
        _entry("gpt-5", "gpt-5", "agent"),
        _entry("claude-4-sonnet", "claude 4 sonnet", "agent"),
        _entry("claude-4-opus", "claude 4 opus", "agent"),
        _entry("claude-4.1-opus", "claude 4.1 opus", "agent"),
        _entry("gpt-4o", "gpt-4o", "agent"),
        _entry("gpt-4.1", "gpt-4.1", "agent"),
        _entry("o4-mini", "o4-mini", "agent"),
        _entry("o3", "o3", "agent"),
        _entry("gemini-2.5-pro", "gemini 2.5 pro", "agent"),
    ]

    planning_models = [
        _entry("warp-basic", "lite", "planning", description="basic model"),
        _entry("gpt-5 (high reasoning)", "gpt-5", "planning",
               description="high reasoning", vision=False),
        _entry("claude-4-opus", "claude 4 opus", "planning"),
        _entry("claude-4.1-opus", "claude 4.1 opus", "planning"),
        _entry("gpt-4.1", "gpt-4.1", "planning"),
        _entry("o4-mini", "o4-mini", "planning"),
        _entry("o3", "o3", "planning"),
    ]

    coding_models = [
        _entry("auto", "auto", "coding", description="claude 4 sonnet"),
        _entry("warp-basic", "lite", "coding", description="basic model"),
        _entry("gpt-5", "gpt-5", "coding"),
        _entry("claude-4-sonnet", "claude 4 sonnet", "coding"),
        _entry("claude-4-opus", "claude 4 opus", "coding"),
        _entry("claude-4.1-opus", "claude 4.1 opus", "coding"),
        _entry("gpt-4o", "gpt-4o", "coding"),
        _entry("gpt-4.1", "gpt-4.1", "coding"),
        _entry("o4-mini", "o4-mini", "coding"),
        _entry("o3", "o3", "coding"),
        _entry("gemini-2.5-pro", "gemini 2.5 pro", "coding"),
    ]

    return {
        "agent_mode": {"default": "auto", "models": agent_models},
        "planning": {"default": "o3", "models": planning_models},
        "coding": {"default": "auto", "models": coding_models},
    }
|
| 286 |
+
|
| 287 |
+
|
| 288 |
+
def get_all_unique_models():
    """Collapse the per-category catalog into one OpenAI-style model list.

    Each unique model id appears once; models present in several categories
    accumulate all their category names under "categories". On any failure a
    minimal single-entry fallback list is returned instead of raising.
    """
    try:
        catalog = get_warp_models()
        by_id = {}
        for category_data in catalog.values():
            for model in category_data["models"]:
                existing = by_id.get(model["id"])
                if existing is None:
                    # First sighting: build the OpenAI-compatible entry.
                    by_id[model["id"]] = {
                        "id": model["id"],
                        "object": "model",
                        "created": int(time.time()),
                        "owned_by": "warp",
                        "display_name": model["display_name"],
                        "description": model["description"] or model["display_name"],
                        "vision_supported": model["vision_supported"],
                        "usage_multiplier": model["usage_multiplier"],
                        "categories": [model["category"]],
                    }
                elif model["category"] not in existing["categories"]:
                    # Same model listed under another category.
                    existing["categories"].append(model["category"])
        return list(by_id.values())
    except Exception:
        # Degrade gracefully to a single catch-all model entry.
        return [
            {
                "id": "auto",
                "object": "model",
                "created": int(time.time()),
                "owned_by": "warp",
                "display_name": "auto",
                "description": "Auto-select best model",
            }
        ]
|
warp2protobuf/config/settings.py
ADDED
|
@@ -0,0 +1,43 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Configuration settings for Warp API server

Contains environment variables, paths, and constants.
"""
import os
import pathlib
from dotenv import load_dotenv

# Load environment variables from a .env file in the working directory.
load_dotenv()

# Path configurations: three levels up from this file (warp2protobuf/config/)
# is the repository root.
SCRIPT_DIR = pathlib.Path(__file__).resolve().parent.parent.parent
PROTO_DIR = SCRIPT_DIR / "proto"
LOGS_DIR = SCRIPT_DIR / "logs"

# API configuration: Warp's multi-agent AI endpoint.
WARP_URL = "https://app.warp.dev/ai/multi-agent"

# Environment variables with defaults.
HOST = os.getenv("HOST", "0.0.0.0")
PORT = int(os.getenv("PORT", "8002"))
# May be None when no token has been provisioned yet.
WARP_JWT = os.getenv("WARP_JWT")

# Client headers configuration: values sent as x-warp-* headers to mimic the
# desktop client.
CLIENT_VERSION = "v0.2025.08.06.08.12.stable_02"
OS_CATEGORY = "Windows"
OS_NAME = "Windows"
OS_VERSION = "11 (26100)"

# Protobuf field names for text detection; PATH_HINT_BONUS are path segments
# that raise a field's likelihood of being user-visible text.
TEXT_FIELD_NAMES = ("text", "prompt", "query", "content", "message", "input")
PATH_HINT_BONUS = ("conversation", "query", "input", "user", "request", "delta")

# Response parsing configuration.
SYSTEM_STR = {"agent_output.text", "server_message_data", "USER_INITIATED", "agent_output", "text"}

# JWT refresh configuration.
# NOTE(review): this is a hardcoded, base64-encoded refresh-token payload
# (a credential) baked into the source — consider moving it to an
# environment variable / secret store.
REFRESH_TOKEN_B64 = "Z3JhbnRfdHlwZT1yZWZyZXNoX3Rva2VuJnJlZnJlc2hfdG9rZW49QU1mLXZCeFNSbWRodmVHR0JZTTY5cDA1a0RoSW4xaTd3c2NBTEVtQzlmWURScEh6akVSOWRMN2trLWtIUFl3dlk5Uk9rbXk1MHFHVGNJaUpaNEFtODZoUFhrcFZQTDkwSEptQWY1Zlo3UGVqeXBkYmNLNHdzbzhLZjNheGlTV3RJUk9oT2NuOU56R2FTdmw3V3FSTU5PcEhHZ0JyWW40SThrclc1N1I4X3dzOHU3WGNTdzh1MERpTDlIcnBNbTBMdHdzQ2g4MWtfNmJiMkNXT0ViMWxJeDNIV1NCVGVQRldzUQ=="
REFRESH_URL = "https://app.warp.dev/proxy/token?key=AIzaSyBdy3O3S9hrdayLJxJ7mriBR4qgUaUygAs"
|
warp2protobuf/core/__init__.py
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Core subpackage for warp2protobuf
|
| 2 |
+
|
| 3 |
+
__all__ = []
|
warp2protobuf/core/auth.py
ADDED
|
@@ -0,0 +1,341 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/env python3
|
| 2 |
+
# -*- coding: utf-8 -*-
|
| 3 |
+
"""
|
| 4 |
+
JWT Authentication for Warp API
|
| 5 |
+
|
| 6 |
+
Handles JWT token management, refresh, and validation.
|
| 7 |
+
Integrates functionality from refresh_jwt.py.
|
| 8 |
+
"""
|
| 9 |
+
import base64
|
| 10 |
+
import json
|
| 11 |
+
import os
|
| 12 |
+
import time
|
| 13 |
+
from pathlib import Path
|
| 14 |
+
import httpx
|
| 15 |
+
import asyncio
|
| 16 |
+
from dotenv import load_dotenv, set_key
|
| 17 |
+
|
| 18 |
+
from ..config.settings import REFRESH_TOKEN_B64, REFRESH_URL, CLIENT_VERSION, OS_CATEGORY, OS_NAME, OS_VERSION
|
| 19 |
+
from .logging import logger, log
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
def decode_jwt_payload(token: str) -> dict:
    """Decode the (unverified) payload section of a JWT.

    Returns the payload claims as a dict, or {} when the token is malformed.
    No signature verification is performed — this is only used to inspect
    expiry/identity metadata locally.
    """
    try:
        segments = token.split('.')
        if len(segments) != 3:
            return {}
        # base64url payloads are transmitted without '=' padding; restore it
        # so the stdlib decoder accepts the segment.
        encoded = segments[1]
        remainder = len(encoded) % 4
        if remainder:
            encoded += '=' * (4 - remainder)
        return json.loads(base64.urlsafe_b64decode(encoded).decode('utf-8'))
    except Exception as e:
        logger.debug(f"Error decoding JWT: {e}")
        return {}
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
def is_token_expired(token: str, buffer_minutes: int = 5) -> bool:
    """Report whether *token* expires within *buffer_minutes* (or already has).

    Tokens whose payload cannot be decoded, or that carry no "exp" claim,
    are treated as expired.
    """
    claims = decode_jwt_payload(token)
    if not claims or 'exp' not in claims:
        return True
    # Remaining lifetime in seconds, compared against the safety buffer.
    remaining = claims['exp'] - time.time()
    return remaining <= buffer_minutes * 60
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
async def refresh_jwt_token() -> dict:
    """Refresh the JWT token using the refresh token.

    Prefers environment variable WARP_REFRESH_TOKEN when present; otherwise
    falls back to the baked-in REFRESH_TOKEN_B64 payload.

    Returns:
        The token endpoint's JSON response (contains "access_token" on
        success), or {} on any HTTP or transport failure.
    """
    logger.info("Refreshing JWT token...")
    # Prefer dynamic refresh token from environment if present
    env_refresh = os.getenv("WARP_REFRESH_TOKEN")
    if env_refresh:
        # Build the form body ourselves so content-length can be set below.
        payload = f"grant_type=refresh_token&refresh_token={env_refresh}".encode("utf-8")
    else:
        # REFRESH_TOKEN_B64 already encodes a complete urlencoded form body.
        payload = base64.b64decode(REFRESH_TOKEN_B64)
    headers = {
        "x-warp-client-version": CLIENT_VERSION,
        "x-warp-os-category": OS_CATEGORY,
        "x-warp-os-name": OS_NAME,
        "x-warp-os-version": OS_VERSION,
        "content-type": "application/x-www-form-urlencoded",
        "accept": "*/*",
        "accept-encoding": "gzip, br",
        "content-length": str(len(payload))
    }
    try:
        async with httpx.AsyncClient(timeout=30.0) as client:
            response = await client.post(
                REFRESH_URL,
                headers=headers,
                content=payload
            )
            if response.status_code == 200:
                token_data = response.json()
                logger.info("Token refresh successful")
                return token_data
            else:
                # Non-200 is treated as a soft failure: log and return {}.
                logger.error(f"Token refresh failed: {response.status_code}")
                logger.error(f"Response: {response.text}")
                return {}
    except Exception as e:
        logger.error(f"Error refreshing token: {e}")
        return {}
|
| 91 |
+
|
| 92 |
+
|
| 93 |
+
def update_env_file(new_jwt: str) -> bool:
    """Persist *new_jwt* as WARP_JWT in the project's .env file.

    Returns True on success, False when the file could not be updated.
    """
    try:
        set_key(str(Path(".env")), "WARP_JWT", new_jwt)
        logger.info("Updated .env file with new JWT token")
        return True
    except Exception as e:
        logger.error(f"Error updating .env file: {e}")
        return False
|
| 102 |
+
|
| 103 |
+
|
| 104 |
+
def update_env_refresh_token(refresh_token: str) -> bool:
    """Persist *refresh_token* as WARP_REFRESH_TOKEN in the .env file.

    Returns True on success, False when the file could not be updated.
    """
    try:
        set_key(str(Path(".env")), "WARP_REFRESH_TOKEN", refresh_token)
        logger.info("Updated .env with WARP_REFRESH_TOKEN")
        return True
    except Exception as e:
        logger.error(f"Error updating .env WARP_REFRESH_TOKEN: {e}")
        return False
|
| 113 |
+
|
| 114 |
+
|
| 115 |
+
async def check_and_refresh_token() -> bool:
    """Ensure the WARP_JWT in the environment is valid, refreshing if needed.

    Returns True when a valid token is present (either the existing one or a
    freshly refreshed one persisted to .env), False when no usable token
    could be obtained.
    """
    current_jwt = os.getenv("WARP_JWT")
    if not current_jwt:
        # No token at all: try a refresh from scratch.
        logger.warning("No JWT token found in environment")
        token_data = await refresh_jwt_token()
        if token_data and "access_token" in token_data:
            return update_env_file(token_data["access_token"])
        return False
    logger.debug("Checking current JWT token expiration...")
    # 15-minute buffer: refresh proactively before actual expiry.
    if is_token_expired(current_jwt, buffer_minutes=15):
        logger.info("JWT token is expired or expiring soon, refreshing...")
        token_data = await refresh_jwt_token()
        if token_data and "access_token" in token_data:
            new_jwt = token_data["access_token"]
            # Sanity-check the freshly issued token before persisting it.
            if not is_token_expired(new_jwt, buffer_minutes=0):
                logger.info("New token is valid")
                return update_env_file(new_jwt)
            else:
                logger.warning("New token appears to be invalid or expired")
                return False
        else:
            logger.error("Failed to get new token from refresh")
            return False
    else:
        # Token still valid: just log the remaining lifetime (best effort).
        payload = decode_jwt_payload(current_jwt)
        if payload and 'exp' in payload:
            expiry_time = payload['exp']
            time_left = expiry_time - time.time()
            hours_left = time_left / 3600
            logger.debug(f"Current token is still valid ({hours_left:.1f} hours remaining)")
        else:
            logger.debug("Current token appears valid")
        return True
|
| 148 |
+
|
| 149 |
+
|
| 150 |
+
async def get_valid_jwt() -> str:
    """Return a usable WARP_JWT, refreshing it via the token endpoint if needed.

    Reloads .env (override=True) so refreshes persisted by other code paths
    are picked up. Raises RuntimeError only when no token exists at all and
    refreshing fails; an expiring token whose refresh fails is still
    returned as a best effort.
    """
    from dotenv import load_dotenv as _load
    _load(override=True)
    jwt = os.getenv("WARP_JWT")
    if not jwt:
        logger.info("No JWT token found, attempting to refresh...")
        if await check_and_refresh_token():
            # Refresh wrote the new token to .env; reload to pick it up.
            _load(override=True)
            jwt = os.getenv("WARP_JWT")
        if not jwt:
            raise RuntimeError("WARP_JWT is not set and refresh failed")
    # 2-minute buffer: refresh if the token is about to expire.
    if is_token_expired(jwt, buffer_minutes=2):
        logger.info("JWT token is expired or expiring soon, attempting to refresh...")
        if await check_and_refresh_token():
            _load(override=True)
            jwt = os.getenv("WARP_JWT")
            if not jwt or is_token_expired(jwt, buffer_minutes=0):
                logger.warning("Warning: New token has short expiry but proceeding anyway")
        else:
            # Best effort: fall back to the existing (possibly stale) token.
            logger.warning("Warning: JWT token refresh failed, trying to use existing token")
    return jwt
|
| 171 |
+
|
| 172 |
+
|
| 173 |
+
def get_jwt_token() -> str:
    """Return the current WARP_JWT from the environment ("" when unset).

    Reloads .env first so recently persisted tokens are visible.
    """
    from dotenv import load_dotenv as _load
    _load()
    return os.getenv("WARP_JWT") or os.environ.get("WARP_JWT", "")
|
| 177 |
+
|
| 178 |
+
|
| 179 |
+
async def refresh_jwt_if_needed() -> bool:
    """Best-effort wrapper around check_and_refresh_token().

    Any exception is logged and reported as False instead of propagating.
    """
    try:
        result = await check_and_refresh_token()
    except Exception as e:
        logger.error(f"JWT refresh failed: {e}")
        return False
    return result
|
| 185 |
+
|
| 186 |
+
|
| 187 |
+
# ============ Anonymous token acquisition (quota refresh) ============
|
| 188 |
+
|
| 189 |
+
_ANON_GQL_URL = "https://app.warp.dev/graphql/v2?op=CreateAnonymousUser"
|
| 190 |
+
_IDENTITY_TOOLKIT_BASE = "https://identitytoolkit.googleapis.com/v1/accounts:signInWithCustomToken"
|
| 191 |
+
|
| 192 |
+
|
| 193 |
+
def _extract_google_api_key_from_refresh_url() -> str:
|
| 194 |
+
try:
|
| 195 |
+
# REFRESH_URL like: https://app.warp.dev/proxy/token?key=API_KEY
|
| 196 |
+
from urllib.parse import urlparse, parse_qs
|
| 197 |
+
parsed = urlparse(REFRESH_URL)
|
| 198 |
+
qs = parse_qs(parsed.query)
|
| 199 |
+
key = qs.get("key", [""])[0]
|
| 200 |
+
return key
|
| 201 |
+
except Exception:
|
| 202 |
+
return ""
|
| 203 |
+
|
| 204 |
+
|
| 205 |
+
async def _create_anonymous_user() -> dict:
    """Create an anonymous Warp user via the CreateAnonymousUser GraphQL mutation.

    Returns the raw GraphQL response dict (the idToken, when present, lives at
    data.createAnonymousUser.idToken). Raises RuntimeError on non-200 HTTP.
    """
    headers = {
        "accept-encoding": "gzip, br",
        "content-type": "application/json",
        "x-warp-client-version": CLIENT_VERSION,
        "x-warp-os-category": OS_CATEGORY,
        "x-warp-os-name": OS_NAME,
        "x-warp-os-version": OS_VERSION,
    }
    # GraphQL payload per anonymous.MD
    query = (
        "mutation CreateAnonymousUser($input: CreateAnonymousUserInput!, $requestContext: RequestContext!) {\n"
        "  createAnonymousUser(input: $input, requestContext: $requestContext) {\n"
        "    __typename\n"
        "    ... on CreateAnonymousUserOutput {\n"
        "      expiresAt\n"
        "      anonymousUserType\n"
        "      firebaseUid\n"
        "      idToken\n"
        "      isInviteValid\n"
        "      responseContext { serverVersion }\n"
        "    }\n"
        "    ... on UserFacingError {\n"
        "      error { __typename message }\n"
        "      responseContext { serverVersion }\n"
        "    }\n"
        "  }\n"
        "}\n"
    )
    variables = {
        "input": {
            "anonymousUserType": "NATIVE_CLIENT_ANONYMOUS_USER_FEATURE_GATED",
            "expirationType": "NO_EXPIRATION",
            "referralCode": None
        },
        "requestContext": {
            # Mimic the desktop client's context headers in the GraphQL body.
            "clientContext": {"version": CLIENT_VERSION},
            "osContext": {
                "category": OS_CATEGORY,
                "linuxKernelVersion": None,
                "name": OS_NAME,
                "version": OS_VERSION,
            }
        }
    }
    body = {"query": query, "variables": variables, "operationName": "CreateAnonymousUser"}
    async with httpx.AsyncClient(timeout=httpx.Timeout(30.0)) as client:
        resp = await client.post(_ANON_GQL_URL, headers=headers, json=body)
        if resp.status_code != 200:
            # Truncate the body in the error to keep logs/exceptions short.
            raise RuntimeError(f"CreateAnonymousUser failed: HTTP {resp.status_code} {resp.text[:200]}")
        data = resp.json()
        return data
|
| 257 |
+
|
| 258 |
+
|
| 259 |
+
async def _exchange_id_token_for_refresh_token(id_token: str) -> dict:
    """Exchange a Firebase custom *id_token* for tokens via signInWithCustomToken.

    Uses the API key parsed from REFRESH_URL when available, otherwise a
    hardcoded fallback key. Returns the Identity Toolkit JSON response
    (contains "refreshToken" on success). Raises RuntimeError on non-200 HTTP.
    """
    key = _extract_google_api_key_from_refresh_url()
    url = f"{_IDENTITY_TOOLKIT_BASE}?key={key}" if key else f"{_IDENTITY_TOOLKIT_BASE}?key=AIzaSyBdy3O3S9hrdayLJxJ7mriBR4qgUaUygAs"
    headers = {
        "accept-encoding": "gzip, br",
        "content-type": "application/x-www-form-urlencoded",
        "x-warp-client-version": CLIENT_VERSION,
        "x-warp-os-category": OS_CATEGORY,
        "x-warp-os-name": OS_NAME,
        "x-warp-os-version": OS_VERSION,
    }
    form = {
        "returnSecureToken": "true",
        "token": id_token,
    }
    async with httpx.AsyncClient(timeout=httpx.Timeout(30.0)) as client:
        resp = await client.post(url, headers=headers, data=form)
        if resp.status_code != 200:
            raise RuntimeError(f"signInWithCustomToken failed: HTTP {resp.status_code} {resp.text[:200]}")
        return resp.json()
|
| 279 |
+
|
| 280 |
+
|
| 281 |
+
async def acquire_anonymous_access_token() -> str:
    """Acquire a new anonymous access token (quota refresh) and persist to .env.

    Flow: CreateAnonymousUser (GraphQL) -> idToken -> signInWithCustomToken
    (Identity Toolkit) -> refreshToken -> Warp proxy token endpoint ->
    access_token. Both the refresh token and the access token are persisted
    to .env as side effects.

    Returns the new access token string. Raises on failure.
    """
    logger.info("Acquiring anonymous access token via GraphQL + Identity Toolkit…")
    data = await _create_anonymous_user()
    id_token = None
    try:
        # Best-effort extraction; missing keys fall through to the check below.
        id_token = data["data"]["createAnonymousUser"].get("idToken")
    except Exception:
        pass
    if not id_token:
        raise RuntimeError(f"CreateAnonymousUser did not return idToken: {data}")

    signin = await _exchange_id_token_for_refresh_token(id_token)
    refresh_token = signin.get("refreshToken")
    if not refresh_token:
        raise RuntimeError(f"signInWithCustomToken did not return refreshToken: {signin}")

    # Persist refresh token for future time-based refreshes
    update_env_refresh_token(refresh_token)

    # Now call Warp proxy token endpoint to get access_token using this refresh token
    payload = f"grant_type=refresh_token&refresh_token={refresh_token}".encode("utf-8")
    headers = {
        "x-warp-client-version": CLIENT_VERSION,
        "x-warp-os-category": OS_CATEGORY,
        "x-warp-os-name": OS_NAME,
        "x-warp-os-version": OS_VERSION,
        "content-type": "application/x-www-form-urlencoded",
        "accept": "*/*",
        "accept-encoding": "gzip, br",
        "content-length": str(len(payload))
    }
    async with httpx.AsyncClient(timeout=httpx.Timeout(30.0)) as client:
        resp = await client.post(REFRESH_URL, headers=headers, content=payload)
        if resp.status_code != 200:
            raise RuntimeError(f"Acquire access_token failed: HTTP {resp.status_code} {resp.text[:200]}")
        token_data = resp.json()
        access = token_data.get("access_token")
        if not access:
            raise RuntimeError(f"No access_token in response: {token_data}")
        # Persist the working access token so other processes can reuse it.
        update_env_file(access)
        return access
|
| 326 |
+
|
| 327 |
+
|
| 328 |
+
def print_token_info():
    """Log basic claims (email / user id) from the current WARP_JWT, if one is set."""
    token = os.getenv("WARP_JWT")
    if not token:
        logger.info("No JWT token found")
        return
    claims = decode_jwt_payload(token)
    if not claims:
        logger.info("Cannot decode JWT token")
        return
    logger.info("=== JWT Token Information ===")
    if 'email' in claims:
        logger.info(f"Email: {claims['email']}")
    if 'user_id' in claims:
        logger.info(f"User ID: {claims['user_id']}")
|
warp2protobuf/core/logging.py
ADDED
|
@@ -0,0 +1,121 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/env python3
|
| 2 |
+
# -*- coding: utf-8 -*-
|
| 3 |
+
"""
|
| 4 |
+
Logging system for Warp API server
|
| 5 |
+
|
| 6 |
+
Provides comprehensive logging with file rotation and console output.
|
| 7 |
+
"""
|
| 8 |
+
import logging
|
| 9 |
+
import os
|
| 10 |
+
import shutil
|
| 11 |
+
from datetime import datetime
|
| 12 |
+
from logging.handlers import RotatingFileHandler
|
| 13 |
+
from ..config.settings import LOGS_DIR
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
def backup_existing_log():
    """Rename the current warp_api.log (if present) to a timestamped backup file."""
    current = LOGS_DIR / 'warp_api.log'
    if not current.exists():
        return
    stamp = datetime.now().strftime('%Y%m%d_%H%M%S')
    backup_name = f'warp_api_{stamp}.log'
    try:
        shutil.move(str(current), str(LOGS_DIR / backup_name))
        print(f"Previous log backed up as: {backup_name}")
    except Exception as e:
        print(f"Warning: Could not backup log file: {e}")
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
def setup_logging():
    """Create and return the 'warp_api' logger with rotating-file and console handlers."""
    LOGS_DIR.mkdir(exist_ok=True)
    backup_existing_log()

    root = logging.getLogger('warp_api')
    root.setLevel(logging.DEBUG)

    # Drop any handlers left over from a previous configuration pass.
    for stale in list(root.handlers):
        root.removeHandler(stale)

    fmt = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(funcName)s:%(lineno)d - %(message)s'
    )

    to_file = RotatingFileHandler(
        LOGS_DIR / 'warp_api.log',
        maxBytes=10 * 1024 * 1024,
        backupCount=5,
        encoding='utf-8'
    )
    to_file.setLevel(logging.DEBUG)
    to_file.setFormatter(fmt)

    to_console = logging.StreamHandler()
    to_console.setLevel(logging.INFO)
    to_console.setFormatter(fmt)

    root.addHandler(to_file)
    root.addHandler(to_console)
    return root
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
# Initialize the module-level logger at import time so sibling modules can
# simply `from .logging import logger`.
logger = setup_logging()
|
| 69 |
+
|
| 70 |
+
|
| 71 |
+
def log(*a):
    """Legacy log helper kept for backward compatibility; joins args with spaces."""
    parts = [str(item) for item in a]
    logger.info(" ".join(parts))
|
| 74 |
+
|
| 75 |
+
|
| 76 |
+
def set_log_file(log_file_name: str) -> None:
    """Reconfigure the global 'warp_api' logger to write to a different log file."""
    try:
        LOGS_DIR.mkdir(exist_ok=True)
    except Exception:
        pass

    global logger
    target = logging.getLogger('warp_api')

    # Detach (and best-effort close) every handler currently attached.
    for stale in list(target.handlers):
        try:
            target.removeHandler(stale)
            try:
                stale.close()
            except Exception:
                pass
        except Exception:
            pass

    fmt = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(funcName)s:%(lineno)d - %(message)s'
    )

    to_file = RotatingFileHandler(
        LOGS_DIR / log_file_name,
        maxBytes=10 * 1024 * 1024,
        backupCount=5,
        encoding='utf-8'
    )
    to_file.setLevel(logging.DEBUG)
    to_file.setFormatter(fmt)

    to_console = logging.StreamHandler()
    to_console.setLevel(logging.INFO)
    to_console.setFormatter(fmt)

    target.addHandler(to_file)
    target.addHandler(to_console)

    logger = target
    try:
        logger.info(f"Logging redirected to: {LOGS_DIR / log_file_name}")
    except Exception:
        pass
|
warp2protobuf/core/protobuf.py
ADDED
|
@@ -0,0 +1,284 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/env python3
|
| 2 |
+
# -*- coding: utf-8 -*-
|
| 3 |
+
"""
|
| 4 |
+
Protobuf runtime for Warp API
|
| 5 |
+
|
| 6 |
+
Handles protobuf compilation, message creation, and request building.
|
| 7 |
+
"""
|
| 8 |
+
import os
|
| 9 |
+
import re
|
| 10 |
+
import json
|
| 11 |
+
import time
|
| 12 |
+
import uuid
|
| 13 |
+
import pathlib
|
| 14 |
+
import tempfile
|
| 15 |
+
from typing import Any, Dict, List, Optional, Tuple
|
| 16 |
+
|
| 17 |
+
from google.protobuf import descriptor_pool, descriptor_pb2
|
| 18 |
+
from google.protobuf.descriptor import FieldDescriptor as FD
|
| 19 |
+
from google.protobuf.message_factory import GetMessageClass
|
| 20 |
+
from google.protobuf import struct_pb2
|
| 21 |
+
|
| 22 |
+
from ..config.settings import PROTO_DIR, CLIENT_VERSION, OS_CATEGORY, OS_NAME, OS_VERSION, TEXT_FIELD_NAMES, PATH_HINT_BONUS
|
| 23 |
+
from .logging import logger, log
|
| 24 |
+
|
| 25 |
+
# Global protobuf state
|
| 26 |
+
_pool: Optional[descriptor_pool.DescriptorPool] = None
|
| 27 |
+
ALL_MSGS: List[str] = []
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
def _find_proto_files(root: pathlib.Path) -> List[str]:
    """Find necessary .proto files in the given directory, excluding problematic test files."""
    if not root.exists():
        return []

    essential_files = [
        "request.proto",
        "response.proto",
        "task.proto",
        "attachment.proto",
        "file_content.proto",
        "input_context.proto",
        "citations.proto"
    ]

    selected: List[str] = []
    for name in essential_files:
        candidate = root / name
        if candidate.exists():
            selected.append(str(candidate))
            logger.debug(f"Found essential proto file: {name}")

    if not selected:
        # Fallback: sweep the whole tree but skip known-problematic fixtures.
        logger.warning("Essential proto files not found, scanning all files...")
        skip_markers = (
            "unittest", "test", "sample_messages", "java_features",
            "legacy_features", "descriptor_test"
        )
        for candidate in root.rglob("*.proto"):
            lowered = candidate.name.lower()
            if all(marker not in lowered for marker in skip_markers):
                selected.append(str(candidate))

    logger.info(f"Selected {len(selected)} proto files for compilation")
    return selected
|
| 66 |
+
|
| 67 |
+
|
| 68 |
+
def _build_descset(proto_files: List[str], includes: List[str]) -> bytes:
    """Compile *proto_files* into a serialized FileDescriptorSet using grpc_tools.protoc."""
    from grpc_tools import protoc
    try:
        # Well-known types ship inside the grpc_tools package.
        from importlib.resources import files as pkg_files
        tool_inc = str(pkg_files("grpc_tools").joinpath("_proto"))
    except Exception:
        tool_inc = None

    out = pathlib.Path(tempfile.mkdtemp(prefix="desc_")) / "bundle.pb"
    argv = ["protoc", f"--descriptor_set_out={out}", "--include_imports"]
    argv.extend(f"-I{inc}" for inc in includes)
    if tool_inc:
        argv.append(f"-I{tool_inc}")
    argv.extend(proto_files)
    if protoc.main(argv) != 0 or not out.exists():
        raise RuntimeError("protoc failed to produce descriptor set")
    return out.read_bytes()
|
| 88 |
+
|
| 89 |
+
|
| 90 |
+
def _load_pool_from_descset(descset: bytes):
    """Populate the module-level descriptor pool and message-name list from a FileDescriptorSet."""
    global _pool, ALL_MSGS
    fds = descriptor_pb2.FileDescriptorSet()
    fds.ParseFromString(descset)

    pool = descriptor_pool.DescriptorPool()
    for file_proto in fds.file:
        pool.Add(file_proto)

    collected: List[str] = []

    def _collect(msg_proto, prefix):
        # Record the fully-qualified name, then recurse into nested types.
        qualified = f"{prefix}.{msg_proto.name}" if prefix else msg_proto.name
        collected.append(qualified)
        for nested in msg_proto.nested_type:
            _collect(nested, qualified)

    for file_proto in fds.file:
        for msg_proto in file_proto.message_type:
            _collect(msg_proto, file_proto.package)

    _pool, ALL_MSGS = pool, collected
    log(f"proto loaded: {len(ALL_MSGS)} message type(s)")
|
| 109 |
+
|
| 110 |
+
|
| 111 |
+
def ensure_proto_runtime():
    """Lazily compile and load the proto descriptor pool; no-op if already loaded."""
    if _pool is not None:
        return
    proto_files = _find_proto_files(PROTO_DIR)
    if not proto_files:
        raise RuntimeError(f"No .proto found under {PROTO_DIR}")
    _load_pool_from_descset(_build_descset(proto_files, [str(PROTO_DIR)]))
|
| 119 |
+
|
| 120 |
+
|
| 121 |
+
def msg_cls(full: str):
    """Return the generated message class for a fully-qualified message type name."""
    return GetMessageClass(_pool.FindMessageTypeByName(full))  # type: ignore
|
| 124 |
+
|
| 125 |
+
|
| 126 |
+
def _list_text_paths(desc, max_depth=6):
    """Enumerate string-field paths reachable from *desc*, scored by name heuristics."""
    found: List[Tuple[List[FD], int]] = []

    def _descend(node, trail: List[FD], depth: int):
        if depth > max_depth:
            return
        for fld in node.fields:
            lowered = fld.name.lower()
            # Exact text-field names score highest; path hints add a small bonus each.
            score = 10 if lowered in TEXT_FIELD_NAMES else 0
            score += 2 * sum(1 for hint in PATH_HINT_BONUS if hint in lowered)
            if fld.type == FD.TYPE_STRING:
                found.append((trail + [fld], score + depth))
            elif fld.type == FD.TYPE_MESSAGE:
                _descend(fld.message_type, trail + [fld], depth + 1)

    _descend(desc, [], 0)
    return found
|
| 142 |
+
|
| 143 |
+
|
| 144 |
+
def _pick_best_request_schema() -> Tuple[str, List[FD]]:
    """Locate the request message type and the field path used to inject user text.

    First tries the known modern type ``warp.multi_agent.v1.Request`` with the
    fixed path ``input.user_inputs.inputs.user_query.query``; on any failure it
    falls back to a heuristic scan over every loaded message type.
    """
    ensure_proto_runtime()
    try:
        request_type = "warp.multi_agent.v1.Request"
        d = _pool.FindMessageTypeByName(request_type)  # type: ignore
        path_names = ["input", "user_inputs", "inputs", "user_query", "query"]
        path_fields = []
        current_desc = d

        # Resolve each path segment in turn, descending into sub-messages.
        for field_name in path_names:
            field = current_desc.fields_by_name.get(field_name)
            if not field:
                raise RuntimeError(f"Field '{field_name}' not found")
            path_fields.append(field)
            if field.type == FD.TYPE_MESSAGE:
                current_desc = field.message_type

        log("using modern request format:", request_type, " :: ", ".".join(path_names))
        return request_type, path_fields

    except Exception as e:
        log(f"Failed to use modern format, falling back to auto-detection: {e}")
        best: Optional[Tuple[str, List[FD], int]] = None
        for full in ALL_MSGS:
            try:
                d = _pool.FindMessageTypeByName(full)  # type: ignore
            except Exception:
                continue
            # Bias toward message names that look like chat/request containers.
            name_bias = 0
            lname = full.lower()
            for kw, w in (("request", 8), ("multi_agent", 6), ("multiagent", 6),
                          ("chat", 5), ("client", 2), ("message", 1), ("input", 1)):
                if kw in lname: name_bias += w
            for path, score in _list_text_paths(d):
                # Shorter paths earn a small extra bonus (at most 6).
                total = score + name_bias + max(0, 6 - len(path))
                if best is None or total > best[2]:
                    best = (full, path, total)
        if not best:
            raise RuntimeError("Could not auto-detect request root & text field from proto/")
        full, path, _ = best
        log("auto-detected request:", full, " :: ", ".".join(f.name for f in path))
        return full, path
|
| 186 |
+
|
| 187 |
+
|
| 188 |
+
_REQ_CACHE: Optional[Tuple[str, List[FD]]] = None


def get_request_schema() -> Tuple[str, List[FD]]:
    """Return (request type name, text field path), computing once and caching."""
    global _REQ_CACHE
    cached = _REQ_CACHE
    if cached is None:
        cached = _pick_best_request_schema()
        _REQ_CACHE = cached
    return cached
|
| 195 |
+
|
| 196 |
+
|
| 197 |
+
def _set_text_at_path(msg, path_fields: List[FD], text: str):
    """Walk *msg* along *path_fields* and assign *text* to the final string field.

    Repeated message fields get a fresh element added at each hop; a repeated
    string terminal receives *text* via append. Raises TypeError when the path
    shape does not end in a (possibly repeated) string field, and RuntimeError
    if the loop exhausts the path without assigning.
    """
    cur = msg
    for i, f in enumerate(path_fields):
        last = (i == len(path_fields) - 1)
        # Newer protobuf runtimes expose `is_repeated`; older ones only `label`.
        try:
            is_repeated = f.is_repeated
        except AttributeError:
            is_repeated = (f.label == FD.LABEL_REPEATED)

        if is_repeated:
            rep = getattr(cur, f.name)
            if f.type == FD.TYPE_MESSAGE:
                cur = rep.add()
            elif f.type == FD.TYPE_STRING:
                if not last: raise TypeError(f"path continues after repeated string field '{f.name}'")
                rep.append(text); return
            else:
                raise TypeError(f"unsupported repeated scalar at '{f.name}'")
        else:
            if f.type == FD.TYPE_MESSAGE:
                cur = getattr(cur, f.name)
                if last:
                    raise TypeError(f"last field '{f.name}' is a message, not string")
            elif f.type == FD.TYPE_STRING:
                if not last: raise TypeError(f"path continues after string field '{f.name}'")
                setattr(cur, f.name, text); return
            else:
                raise TypeError(f"unsupported scalar at '{f.name}'")
    raise RuntimeError("failed to set text")
|
| 226 |
+
|
| 227 |
+
|
| 228 |
+
def build_request_bytes(user_text: str, model: str = "auto") -> bytes:
    """Build a serialized Warp request protobuf carrying *user_text*.

    Injects the text at the detected field path, applies the model mapping and
    feature-flag settings, stamps a fresh conversation id, and fills top-level
    client/OS metadata where the schema exposes matching optional string fields.
    """
    from ..config.models import get_model_config

    full, path = get_request_schema()
    Cls = msg_cls(full)
    msg = Cls()
    _set_text_at_path(msg, path, user_text)

    if hasattr(msg, 'settings'):
        settings = msg.settings
        if hasattr(settings, 'model_config'):
            # Map the requested model name onto base/planning/coding tiers.
            model_config_dict = get_model_config(model)
            model_config = settings.model_config
            model_config.base = model_config_dict["base"]
            model_config.planning = model_config_dict["planning"]
            model_config.coding = model_config_dict["coding"]
            logger.debug(f"Set model config: base={model_config.base}, planning={model_config.planning}, coding={model_config.coding}")

        # Disable interactive/agentic features not used by this REST bridge.
        settings.rules_enabled = False
        settings.web_context_retrieval_enabled = False
        settings.supports_parallel_tool_calls = False
        settings.planning_enabled = False
        settings.supports_create_files = False
        settings.supports_long_running_commands = False
        settings.supports_todos_ui = False
        settings.supports_linked_code_blocks = False

        settings.use_anthropic_text_editor_tools = False
        settings.warp_drive_context_enabled = False
        settings.should_preserve_file_content_in_history = True

        try:
            # Explicitly advertise an empty client-side tool set.
            tool_types = []
            settings.supported_tools[:] = tool_types
            logger.debug(f"Set supported_tools (legacy): {tool_types}")
        except Exception as e:
            logger.debug(f"Could not set supported_tools: {e}")

        logger.debug("Applied all valid Settings fields based on proto definition")

    if hasattr(msg, 'metadata'):
        metadata = msg.metadata
        metadata.conversation_id = f"rest-api-{uuid.uuid4().hex[:8]}"

    # Only set top-level client/OS fields that exist as optional strings in
    # the compiled schema, so this works across proto revisions.
    rootd = msg.DESCRIPTOR
    for fn, val in (
        ("client_version", CLIENT_VERSION),
        ("version", CLIENT_VERSION),
        ("os_name", OS_NAME),
        ("os_category", OS_CATEGORY),
        ("os_version", OS_VERSION),
    ):
        f = rootd.fields_by_name.get(fn)
        if f and f.type == FD.TYPE_STRING and f.label == FD.LABEL_OPTIONAL:
            setattr(msg, fn, val)

    return msg.SerializeToString()
|
warp2protobuf/core/protobuf_utils.py
ADDED
|
@@ -0,0 +1,310 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/env python3
|
| 2 |
+
# -*- coding: utf-8 -*-
|
| 3 |
+
"""
|
| 4 |
+
Protobuf utility functions
|
| 5 |
+
|
| 6 |
+
Shared functions for protobuf encoding/decoding across the application.
|
| 7 |
+
"""
|
| 8 |
+
from typing import Any, Dict
|
| 9 |
+
from fastapi import HTTPException
|
| 10 |
+
from .logging import logger
|
| 11 |
+
from .protobuf import ensure_proto_runtime, msg_cls
|
| 12 |
+
from google.protobuf.json_format import MessageToDict
|
| 13 |
+
from google.protobuf import struct_pb2
|
| 14 |
+
from google.protobuf.descriptor import FieldDescriptor as _FD
|
| 15 |
+
from .server_message_data import decode_server_message_data, encode_server_message_data
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
def protobuf_to_dict(protobuf_bytes: bytes, message_type: str) -> Dict:
    """Decode raw protobuf bytes of *message_type* into a plain dict.

    server_message_data fields are transparently converted from Base64URL
    strings into structured objects. Raises HTTPException(500) on failure.
    """
    ensure_proto_runtime()

    try:
        parsed = msg_cls(message_type)()
        parsed.ParseFromString(protobuf_bytes)
        as_dict = MessageToDict(parsed, preserving_proto_field_name=True)
        return _decode_smd_inplace(as_dict)

    except Exception as e:
        logger.error(f"Protobuf解码失败: {e}")
        raise HTTPException(500, f"Protobuf解码失败: {e}")
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
|
| 44 |
+
def dict_to_protobuf_bytes(data_dict: Dict, message_type: str = "warp.multi_agent.v1.Request") -> bytes:
    """Encode a plain dict into serialized protobuf bytes of *message_type*.

    server_message_data objects are transparently converted to Base64URL
    strings before population. Raises HTTPException(500) on failure.
    """
    ensure_proto_runtime()

    try:
        message = msg_cls(message_type)()
        sanitized = _encode_smd_inplace(data_dict)
        _populate_protobuf_from_dict(message, sanitized, path="$")
        return message.SerializeToString()

    except Exception as e:
        logger.error(f"Protobuf编码失败: {e}")
        raise HTTPException(500, f"Protobuf编码失败: {e}")
|
| 62 |
+
|
| 63 |
+
|
| 64 |
+
|
| 65 |
+
|
| 66 |
+
def _fill_google_value_dynamic(value_msg: Any, py_value: Any) -> None:
    """Populate a dynamic google.protobuf.Value message from a Python value
    without instantiating struct_pb2.Value directly."""
    try:
        # bool must be tested before int/float: bool is an int subclass.
        if py_value is None:
            value_msg.null_value = 0
        elif isinstance(py_value, bool):
            value_msg.bool_value = bool(py_value)
        elif isinstance(py_value, (int, float)):
            value_msg.number_value = float(py_value)
        elif isinstance(py_value, str):
            value_msg.string_value = py_value
        elif isinstance(py_value, dict):
            _fill_google_struct_dynamic(value_msg.struct_value, py_value)
        elif isinstance(py_value, list):
            container = value_msg.list_value.values
            for element in py_value:
                _fill_google_value_dynamic(container.add(), element)
        else:
            # Fall back to the string representation for unknown types.
            value_msg.string_value = str(py_value)
    except Exception as e:
        logger.warning(f"填充 google.protobuf.Value 失败: {e}")
|
| 95 |
+
|
| 96 |
+
|
| 97 |
+
|
| 98 |
+
|
| 99 |
+
def _fill_google_struct_dynamic(struct_msg: Any, py_dict: Dict[str, Any]) -> None:
    """Populate a dynamic google.protobuf.Struct from a Python dict
    (avoids struct_pb2.Struct.update on dynamic messages)."""
    try:
        target_fields = struct_msg.fields
        for key, value in py_dict.items():
            _fill_google_value_dynamic(target_fields[key], value)
    except Exception as e:
        logger.warning(f"填充 google.protobuf.Struct 失败: {e}")
|
| 108 |
+
|
| 109 |
+
|
| 110 |
+
|
| 111 |
+
|
| 112 |
+
def _python_to_struct_value(py_value: Any) -> struct_pb2.Value:
    """Convert a Python value into a struct_pb2.Value, recursing into containers."""
    result = struct_pb2.Value()
    # bool is checked before numeric types because bool subclasses int.
    if py_value is None:
        result.null_value = struct_pb2.NULL_VALUE
    elif isinstance(py_value, bool):
        result.bool_value = bool(py_value)
    elif isinstance(py_value, (int, float)):
        result.number_value = float(py_value)
    elif isinstance(py_value, str):
        result.string_value = py_value
    elif isinstance(py_value, dict):
        nested = struct_pb2.Struct()
        nested.update(py_value)
        result.struct_value.CopyFrom(nested)
    elif isinstance(py_value, list):
        nested_list = struct_pb2.ListValue()
        for item in py_value:
            nested_list.values.append(_python_to_struct_value(item))
        result.list_value.CopyFrom(nested_list)
    else:
        result.string_value = str(py_value)
    return result
|
| 134 |
+
|
| 135 |
+
|
| 136 |
+
|
| 137 |
+
|
| 138 |
+
def _populate_protobuf_from_dict(proto_msg, data_dict: Dict, path: str = "$"):
    """Recursively copy *data_dict* into *proto_msg*, field by field.

    Handles google.protobuf.Struct fields, map fields (including map<…, Value>),
    repeated messages, repeated/scalar enums given as names or numbers, and
    presence-only message fields. Unknown keys are logged and skipped; *path*
    is a JSONPath-like string used only for log messages.
    """
    for key, value in data_dict.items():
        current_path = f"{path}.{key}"
        if not hasattr(proto_msg, key):
            logger.warning(f"忽略未知字段: {current_path}")
            continue

        field = getattr(proto_msg, key)
        fd = None
        descriptor = getattr(proto_msg, "DESCRIPTOR", None)
        if descriptor is not None:
            fd = descriptor.fields_by_name.get(key)

        # Case 1: google.protobuf.Struct field fed with a dict — fill dynamically.
        try:
            if (
                fd is not None
                and fd.type == _FD.TYPE_MESSAGE
                and fd.message_type is not None
                and fd.message_type.full_name == "google.protobuf.Struct"
                and isinstance(value, dict)
            ):
                _fill_google_struct_dynamic(field, value)
                continue
        except Exception as e:
            logger.warning(f"处理 Struct 字段 {current_path} 失败: {e}")

        # Case 2: a concrete struct_pb2.Struct instance — use its update().
        if isinstance(field, struct_pb2.Struct) and isinstance(value, dict):
            try:
                field.update(value)
            except Exception as e:
                logger.warning(f"填充Struct失败: {current_path}: {e}")
            continue

        # Case 3: map field (map_entry synthetic message) fed with a dict.
        try:
            if (
                fd is not None
                and fd.type == _FD.TYPE_MESSAGE
                and fd.message_type is not None
                and fd.message_type.GetOptions().map_entry
                and isinstance(value, dict)
            ):
                value_desc = fd.message_type.fields_by_name.get("value")
                for mk, mv in value.items():
                    try:
                        if value_desc is not None and value_desc.type == _FD.TYPE_MESSAGE:
                            if value_desc.message_type is not None and value_desc.message_type.full_name == "google.protobuf.Value":
                                _fill_google_value_dynamic(field[mk], mv)
                            else:
                                sub_msg = field[mk]
                                if isinstance(mv, dict):
                                    _populate_protobuf_from_dict(sub_msg, mv, path=f"{current_path}.{mk}")
                                else:
                                    try:
                                        logger.warning(f"map值类型不匹配,期望message: {current_path}.{mk}")
                                    except Exception:
                                        pass
                        else:
                            field[mk] = mv
                    except Exception as me:
                        logger.warning(f"设置 map 字段 {current_path}.{mk} 失败: {me}")
                continue
        except Exception as e:
            logger.warning(f"处理 map 字段 {current_path} 失败: {e}")

        if isinstance(value, dict):
            # Nested sub-message: recurse; failures here are fatal (re-raised).
            try:
                _populate_protobuf_from_dict(field, value, path=current_path)
            except Exception as e:
                logger.error(f"填充子消息失败: {current_path}: {e}")
                raise
        elif isinstance(value, list):
            # Repeated enum: accept string names or numbers for each element.
            try:
                if fd is not None and fd.type == _FD.TYPE_ENUM:
                    enum_desc = getattr(fd, "enum_type", None)
                    resolved_values = []
                    for item in value:
                        if isinstance(item, str):
                            ev = enum_desc.values_by_name.get(item) if enum_desc is not None else None
                            if ev is not None:
                                resolved_values.append(ev.number)
                            else:
                                try:
                                    resolved_values.append(int(item))
                                except Exception:
                                    logger.warning(f"无法解析枚举值 '{item}' 为 {current_path},已忽略")
                        else:
                            try:
                                resolved_values.append(int(item))
                            except Exception:
                                logger.warning(f"无法转换枚举值 {item} 为整数: {current_path}")
                    field.extend(resolved_values)
                    continue
            except Exception as e:
                logger.warning(f"处理 repeated enum 字段 {current_path} 失败: {e}")
            if value and isinstance(value[0], dict):
                # Repeated message: add one element per dict and recurse.
                try:
                    for idx, item in enumerate(value):
                        new_item = field.add()  # type: ignore[attr-defined]
                        _populate_protobuf_from_dict(new_item, item, path=f"{current_path}[{idx}]")
                except Exception as e:
                    logger.warning(f"填充复合数组失败 {current_path}: {e}")
            else:
                try:
                    field.extend(value)
                except Exception as e:
                    logger.warning(f"设置数组字段 {current_path} 失败: {e}")
        else:
            if key in ["in_progress", "resume_conversation"]:
                # Presence-only message fields: mark them set without content.
                field.SetInParent()
            else:
                try:
                    # Scalar enum: accept a string name or a number.
                    if fd is not None and fd.type == _FD.TYPE_ENUM:
                        enum_desc = getattr(fd, "enum_type", None)
                        if isinstance(value, str):
                            ev = enum_desc.values_by_name.get(value) if enum_desc is not None else None
                            if ev is not None:
                                setattr(proto_msg, key, ev.number)
                                continue
                        try:
                            setattr(proto_msg, key, int(value))
                            continue
                        except Exception:
                            pass
                    # Otherwise assign directly; type mismatches raise below.
                    setattr(proto_msg, key, value)
                except Exception as e:
                    logger.warning(f"设置字段 {current_path} 失败: {e}")
|
| 268 |
+
|
| 269 |
+
# ===== server_message_data 递归处理 =====
|
| 270 |
+
|
| 271 |
+
def _encode_smd_inplace(obj: Any) -> Any:
|
| 272 |
+
if isinstance(obj, dict):
|
| 273 |
+
new_d: Dict[str, Any] = {}
|
| 274 |
+
for k, v in obj.items():
|
| 275 |
+
if k in ("server_message_data", "serverMessageData") and isinstance(v, dict):
|
| 276 |
+
try:
|
| 277 |
+
b64 = encode_server_message_data(
|
| 278 |
+
uuid=v.get("uuid"),
|
| 279 |
+
seconds=v.get("seconds"),
|
| 280 |
+
nanos=v.get("nanos"),
|
| 281 |
+
)
|
| 282 |
+
new_d[k] = b64
|
| 283 |
+
except Exception:
|
| 284 |
+
new_d[k] = v
|
| 285 |
+
else:
|
| 286 |
+
new_d[k] = _encode_smd_inplace(v)
|
| 287 |
+
return new_d
|
| 288 |
+
elif isinstance(obj, list):
|
| 289 |
+
return [_encode_smd_inplace(x) for x in obj]
|
| 290 |
+
else:
|
| 291 |
+
return obj
|
| 292 |
+
|
| 293 |
+
|
| 294 |
+
def _decode_smd_inplace(obj: Any) -> Any:
    """Recursively replace string-valued ``server_message_data`` /
    ``serverMessageData`` entries with their decoded dict form.

    All other values are copied unchanged; containers are rebuilt rather
    than mutated in place.
    """
    if isinstance(obj, list):
        return [_decode_smd_inplace(item) for item in obj]
    if not isinstance(obj, dict):
        return obj

    result: Dict[str, Any] = {}
    for key, val in obj.items():
        smd_key = key in ("server_message_data", "serverMessageData")
        if smd_key and isinstance(val, str):
            try:
                result[key] = decode_server_message_data(val)
            except Exception:
                # Best-effort: keep the opaque string on decode failure.
                result[key] = val
        else:
            result[key] = _decode_smd_inplace(val)
    return result
|
warp2protobuf/core/schema_sanitizer.py
ADDED
|
@@ -0,0 +1,175 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# -*- coding: utf-8 -*-
|
| 2 |
+
"""
|
| 3 |
+
Shared utilities to validate and sanitize MCP tool input_schema in request packets.
|
| 4 |
+
Ensures JSON Schema correctness, removes empty values, and enforces non-empty
|
| 5 |
+
`type` and `description` for each property. Special handling for `headers`.
|
| 6 |
+
"""
|
| 7 |
+
from typing import Any, Dict, List
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
def _is_empty_value(value: Any) -> bool:
|
| 11 |
+
if value is None:
|
| 12 |
+
return True
|
| 13 |
+
if isinstance(value, str) and value.strip() == "":
|
| 14 |
+
return True
|
| 15 |
+
if isinstance(value, (list, dict)) and len(value) == 0:
|
| 16 |
+
return True
|
| 17 |
+
return False
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
def _deep_clean(value: Any) -> Any:
    """Recursively normalize a JSON-like value.

    Strings are stripped of surrounding whitespace; dict entries and list
    items whose cleaned value is "empty" (per _is_empty_value) are dropped.
    Scalars other than strings pass through untouched.
    """
    if isinstance(value, dict):
        result: Dict[str, Any] = {}
        for key, raw in value.items():
            cleaned = _deep_clean(raw)
            if not _is_empty_value(cleaned):
                result[key] = cleaned
        return result
    if isinstance(value, list):
        cleaned_items = (_deep_clean(item) for item in value)
        return [item for item in cleaned_items if not _is_empty_value(item)]
    if isinstance(value, str):
        return value.strip()
    return value
|
| 40 |
+
|
| 41 |
+
|
| 42 |
+
def _infer_type_for_property(prop_name: str) -> str:
|
| 43 |
+
name = prop_name.lower()
|
| 44 |
+
if name in ("url", "uri", "href", "link"):
|
| 45 |
+
return "string"
|
| 46 |
+
if name in ("headers", "options", "params", "payload", "data"):
|
| 47 |
+
return "object"
|
| 48 |
+
return "string"
|
| 49 |
+
|
| 50 |
+
|
| 51 |
+
def _ensure_property_schema(name: str, schema: Dict[str, Any]) -> Dict[str, Any]:
    """Normalize one property's subschema.

    Guarantees a non-empty string ``type`` (inferred from *name* if
    missing) and a non-empty ``description``. A property named
    ``headers`` additionally gets object shape with at least a
    ``user-agent`` sub-property, and its ``required`` list is pruned to
    existing sub-properties.

    Returns a cleaned copy; the input dict is not mutated.
    """
    # Work on a shallow copy; non-dict input collapses to {}.
    prop = dict(schema) if isinstance(schema, dict) else {}
    prop = _deep_clean(prop)

    # Enforce type & description
    if "type" not in prop or not isinstance(prop.get("type"), str) or not prop["type"].strip():
        prop["type"] = _infer_type_for_property(name)
    if "description" not in prop or not isinstance(prop.get("description"), str) or not prop["description"].strip():
        prop["description"] = f"{name} parameter"

    # Special handling for headers
    if name.lower() == "headers":
        prop["type"] = "object"
        headers_props = prop.get("properties")
        if not isinstance(headers_props, dict):
            headers_props = {}
        headers_props = _deep_clean(headers_props)
        if not headers_props:
            # Empty headers schema: seed with a minimal user-agent entry.
            headers_props = {
                "user-agent": {
                    "type": "string",
                    "description": "User-Agent header for the request",
                }
            }
        else:
            # Ensure every declared header has a usable type/description.
            fixed_headers: Dict[str, Any] = {}
            for hk, hv in headers_props.items():
                sub = _deep_clean(hv if isinstance(hv, dict) else {})
                if "type" not in sub or not isinstance(sub.get("type"), str) or not sub["type"].strip():
                    sub["type"] = "string"
                if "description" not in sub or not isinstance(sub.get("description"), str) or not sub["description"].strip():
                    sub["description"] = f"{hk} header"
                fixed_headers[hk] = sub
            headers_props = fixed_headers
        prop["properties"] = headers_props
        # Keep only required names that actually exist as sub-properties.
        if isinstance(prop.get("required"), list):
            req = [r for r in prop["required"] if isinstance(r, str) and r in headers_props]
            if req:
                prop["required"] = req
            else:
                prop.pop("required", None)
        # An empty additionalProperties object carries no information.
        if isinstance(prop.get("additionalProperties"), dict) and len(prop["additionalProperties"]) == 0:
            prop.pop("additionalProperties", None)

    return prop
|
| 96 |
+
|
| 97 |
+
|
| 98 |
+
def _sanitize_json_schema(schema: Dict[str, Any]) -> Dict[str, Any]:
    """Normalize a whole tool input JSON Schema.

    Cleans empty values, infers ``type: object`` when properties are
    present, guarantees a valid ``$schema`` URI, normalizes each property
    via _ensure_property_schema, prunes the top-level ``required`` list
    to declared properties, and drops empty ``additionalProperties``.
    """
    # Non-dict input collapses to {} before cleaning.
    s = _deep_clean(schema if isinstance(schema, dict) else {})

    # If properties exist, assume object type
    if "properties" in s and not isinstance(s.get("type"), str):
        s["type"] = "object"

    # Normalize $schema: drop non-string values, then default to draft-07.
    if "$schema" in s and not isinstance(s["$schema"], str):
        s.pop("$schema", None)
    if "$schema" not in s:
        s["$schema"] = "http://json-schema.org/draft-07/schema#"

    properties = s.get("properties")
    if isinstance(properties, dict):
        fixed_props: Dict[str, Any] = {}
        for name, subschema in properties.items():
            fixed_props[name] = _ensure_property_schema(name, subschema if isinstance(subschema, dict) else {})
        s["properties"] = fixed_props

    # Clean required list (keys are unchanged by the fix-up above, so the
    # pre-fix `properties` dict is a valid membership reference).
    if isinstance(s.get("required"), list):
        if isinstance(properties, dict):
            req = [r for r in s["required"] if isinstance(r, str) and r in properties]
        else:
            req = []
        if req:
            s["required"] = req
        else:
            s.pop("required", None)

    # Remove empty additionalProperties object
    if isinstance(s.get("additionalProperties"), dict) and len(s["additionalProperties"]) == 0:
        s.pop("additionalProperties", None)

    return s
|
| 134 |
+
|
| 135 |
+
|
| 136 |
+
def sanitize_mcp_input_schema_in_packet(body: Dict[str, Any]) -> Dict[str, Any]:
    """Validate and sanitize mcp_context.tools[*].input_schema in the given packet.

    - Removes empty values (empty strings, lists, dicts)
    - Ensures each property has non-empty `type` and `description`
    - Special-cases `headers` to include at least `user-agent` when empty
    - Fixes `required` lists and general JSON Schema shape

    Looks for ``mcp_context`` both under ``body["json_data"]`` and at the
    top level. Any exception aborts sanitization and the (possibly
    partially cleaned) body is returned as-is — this is a best-effort pass.
    """
    try:
        body = _deep_clean(body)
        # mcp_context may live under json_data (wrapped packet) or at top level.
        candidate_roots: List[Dict[str, Any]] = []
        if isinstance(body.get("json_data"), dict):
            candidate_roots.append(body["json_data"])
        candidate_roots.append(body)

        for root in candidate_roots:
            if not isinstance(root, dict):
                continue
            mcp_ctx = root.get("mcp_context")
            if not isinstance(mcp_ctx, dict):
                continue
            tools = mcp_ctx.get("tools")
            if not isinstance(tools, list):
                continue
            fixed_tools: List[Any] = []
            for tool in tools:
                # Non-dict entries are passed through untouched.
                if not isinstance(tool, dict):
                    fixed_tools.append(tool)
                    continue
                tool_copy = dict(tool)
                # Accept both snake_case and camelCase spellings.
                input_schema = tool_copy.get("input_schema") or tool_copy.get("inputSchema")
                if isinstance(input_schema, dict):
                    tool_copy["input_schema"] = _sanitize_json_schema(input_schema)
                    # Keep the camelCase alias in sync when present.
                    if "inputSchema" in tool_copy:
                        tool_copy["inputSchema"] = tool_copy["input_schema"]
                fixed_tools.append(_deep_clean(tool_copy))
            mcp_ctx["tools"] = fixed_tools
        return body
    except Exception:
        # Best-effort sanitization: never fail the caller's request.
        return body
|
warp2protobuf/core/server_message_data.py
ADDED
|
@@ -0,0 +1,189 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/env python3
|
| 2 |
+
# -*- coding: utf-8 -*-
|
| 3 |
+
"""
|
| 4 |
+
Helpers for encoding/decoding server_message_data values.
|
| 5 |
+
|
| 6 |
+
These are Base64URL-encoded proto3 messages with shape:
|
| 7 |
+
- field 1: string UUID (36 chars)
|
| 8 |
+
- field 3: google.protobuf.Timestamp (1=seconds, 2=nanos)
|
| 9 |
+
|
| 10 |
+
Supports UUID_ONLY, TIMESTAMP_ONLY, and UUID_AND_TIMESTAMP.
|
| 11 |
+
"""
|
| 12 |
+
from typing import Dict, Optional, Tuple
|
| 13 |
+
import base64
|
| 14 |
+
from datetime import datetime, timezone
|
| 15 |
+
|
| 16 |
+
try:
|
| 17 |
+
from zoneinfo import ZoneInfo # Python 3.9+
|
| 18 |
+
except Exception:
|
| 19 |
+
ZoneInfo = None # type: ignore
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
def _b64url_decode_padded(s: str) -> bytes:
|
| 23 |
+
t = s.replace("-", "+").replace("_", "/")
|
| 24 |
+
pad = (-len(t)) % 4
|
| 25 |
+
if pad:
|
| 26 |
+
t += "=" * pad
|
| 27 |
+
return base64.b64decode(t)
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
def _b64url_encode_nopad(b: bytes) -> str:
|
| 31 |
+
return base64.urlsafe_b64encode(b).decode("ascii").rstrip("=")
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
def _read_varint(buf: bytes, i: int) -> Tuple[int, int]:
|
| 35 |
+
shift = 0
|
| 36 |
+
val = 0
|
| 37 |
+
while i < len(buf):
|
| 38 |
+
b = buf[i]
|
| 39 |
+
i += 1
|
| 40 |
+
val |= (b & 0x7F) << shift
|
| 41 |
+
if not (b & 0x80):
|
| 42 |
+
return val, i
|
| 43 |
+
shift += 7
|
| 44 |
+
if shift > 63:
|
| 45 |
+
break
|
| 46 |
+
raise ValueError("invalid varint")
|
| 47 |
+
|
| 48 |
+
|
| 49 |
+
def _write_varint(v: int) -> bytes:
|
| 50 |
+
out = bytearray()
|
| 51 |
+
vv = int(v)
|
| 52 |
+
while True:
|
| 53 |
+
to_write = vv & 0x7F
|
| 54 |
+
vv >>= 7
|
| 55 |
+
if vv:
|
| 56 |
+
out.append(to_write | 0x80)
|
| 57 |
+
else:
|
| 58 |
+
out.append(to_write)
|
| 59 |
+
break
|
| 60 |
+
return bytes(out)
|
| 61 |
+
|
| 62 |
+
|
| 63 |
+
def _make_key(field_no: int, wire_type: int) -> bytes:
    """Build a protobuf field key: ``(field_number << 3) | wire_type``, varint-encoded."""
    tag = (field_no << 3) | wire_type
    return _write_varint(tag)
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
def _decode_timestamp(buf: bytes) -> Tuple[Optional[int], Optional[int]]:
    """Parse a serialized google.protobuf.Timestamp submessage.

    Returns ``(seconds, nanos)``; either may be None if its field is
    absent. Unknown fields are skipped by wire type; an unrecognized
    wire type aborts the scan (best-effort parse, no exception).
    """
    i = 0
    seconds: Optional[int] = None
    nanos: Optional[int] = None
    while i < len(buf):
        key, i = _read_varint(buf, i)
        field_no = key >> 3
        wt = key & 0x07
        if wt == 0:
            # Varint fields: 1 = seconds, 2 = nanos.
            val, i = _read_varint(buf, i)
            if field_no == 1:
                seconds = int(val)
            elif field_no == 2:
                nanos = int(val)
        elif wt == 2:
            # Length-delimited: skip payload.
            ln, i2 = _read_varint(buf, i)
            i = i2 + ln
        elif wt == 1:
            i += 8  # fixed64: skip
        elif wt == 5:
            i += 4  # fixed32: skip
        else:
            break  # unknown wire type — stop parsing
    return seconds, nanos
|
| 91 |
+
|
| 92 |
+
|
| 93 |
+
def _encode_timestamp(seconds: Optional[int], nanos: Optional[int]) -> bytes:
    """Serialize google.protobuf.Timestamp fields (1 = seconds, 2 = nanos).

    A None field is simply omitted from the output.
    """
    out = bytearray()
    for field_no, value in ((1, seconds), (2, nanos)):
        if value is None:
            continue
        out += _make_key(field_no, 0)
        out += _write_varint(int(value))
    return bytes(out)
|
| 102 |
+
|
| 103 |
+
|
| 104 |
+
def decode_server_message_data(b64url: str) -> Dict:
    """Decode a Base64URL server_message_data blob into a dict.

    Expected wire shape: field 1 = UUID string, field 3 = embedded
    google.protobuf.Timestamp. Returns a dict with keys ``uuid``,
    ``seconds``, ``nanos``, ``iso_utc``, ``iso_ny`` and ``type``
    (UUID_ONLY / TIMESTAMP_ONLY / UUID_AND_TIMESTAMP / UNKNOWN), or an
    ``{"error": ...}`` dict when the Base64URL envelope is invalid.
    """
    try:
        raw = _b64url_decode_padded(b64url)
    except Exception as e:
        return {"error": f"base64url decode failed: {e}"}

    i = 0
    uuid: Optional[str] = None
    seconds: Optional[int] = None
    nanos: Optional[int] = None

    # Walk the top-level message, skipping fields we don't recognize.
    while i < len(raw):
        key, i = _read_varint(raw, i)
        field_no = key >> 3
        wt = key & 0x07
        if wt == 2:
            # Length-delimited: field 1 = UUID, field 3 = Timestamp.
            ln, i2 = _read_varint(raw, i)
            i = i2
            data = raw[i:i+ln]
            i += ln
            if field_no == 1:
                try:
                    uuid = data.decode("utf-8")
                except Exception:
                    uuid = None  # malformed UTF-8 — treat as absent
            elif field_no == 3:
                s, n = _decode_timestamp(data)
                if s is not None:
                    seconds = s
                if n is not None:
                    nanos = n
        elif wt == 0:
            _, i = _read_varint(raw, i)  # skip varint field
        elif wt == 1:
            i += 8  # skip fixed64
        elif wt == 5:
            i += 4  # skip fixed32
        else:
            break  # unknown wire type — stop parsing

    # Derive human-readable timestamps when seconds are present.
    iso_utc: Optional[str] = None
    iso_ny: Optional[str] = None
    if seconds is not None:
        # nanos → microseconds (datetime has no nanosecond resolution).
        micros = int((nanos or 0) / 1000)
        dt = datetime.fromtimestamp(int(seconds), tz=timezone.utc).replace(microsecond=micros)
        iso_utc = dt.isoformat().replace("+00:00", "Z")
        if ZoneInfo is not None:
            try:
                iso_ny = dt.astimezone(ZoneInfo("America/New_York")).isoformat()
            except Exception:
                iso_ny = None  # tz database unavailable — skip local time

    # Classify which components were present.
    if uuid and (seconds is not None or nanos is not None):
        t = "UUID_AND_TIMESTAMP"
    elif uuid:
        t = "UUID_ONLY"
    elif seconds is not None or nanos is not None:
        t = "TIMESTAMP_ONLY"
    else:
        t = "UNKNOWN"

    return {
        "uuid": uuid,
        "seconds": seconds,
        "nanos": nanos,
        "iso_utc": iso_utc,
        "iso_ny": iso_ny,
        "type": t,
    }
|
| 173 |
+
|
| 174 |
+
|
| 175 |
+
def encode_server_message_data(uuid: Optional[str] = None,
                               seconds: Optional[int] = None,
                               nanos: Optional[int] = None) -> str:
    """Build an unpadded Base64URL server_message_data blob.

    Field 1 carries the UUID string (when truthy); field 3 carries an
    embedded Timestamp submessage when seconds and/or nanos is given.
    """
    payload = bytearray()
    if uuid:
        uuid_bytes = uuid.encode("utf-8")
        payload += _make_key(1, 2) + _write_varint(len(uuid_bytes)) + uuid_bytes
    if not (seconds is None and nanos is None):
        ts_bytes = _encode_timestamp(seconds, nanos)
        payload += _make_key(3, 2) + _write_varint(len(ts_bytes)) + ts_bytes
    return _b64url_encode_nopad(bytes(payload))
|
warp2protobuf/core/session.py
ADDED
|
@@ -0,0 +1,133 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/env python3
|
| 2 |
+
# -*- coding: utf-8 -*-
|
| 3 |
+
"""
|
| 4 |
+
Global session management for Warp API
|
| 5 |
+
|
| 6 |
+
Manages fixed conversation_id and task context based on real packet analysis.
|
| 7 |
+
"""
|
| 8 |
+
import uuid
|
| 9 |
+
import time
|
| 10 |
+
import asyncio
|
| 11 |
+
from typing import Dict, List, Optional, Any
|
| 12 |
+
from dataclasses import dataclass, field
|
| 13 |
+
from .logging import logger
|
| 14 |
+
|
| 15 |
+
# 全局固定的conversation_id - 所有请求都使用这个ID
|
| 16 |
+
FIXED_CONVERSATION_ID = "5b48d359-0715-479e-a158-0a00f2dfea36"
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
@dataclass
class SessionMessage:
    """Represents a message in the session history"""
    # Unique message id (format "msg-<8 hex chars>", assigned by the manager).
    id: str
    role: str  # "user", "assistant", "system", "tool"
    # Plain-text body of the message.
    content: str
    # OpenAI-style tool call payloads attached to an assistant message.
    tool_calls: Optional[List[Dict]] = None
    # For role "tool": id of the tool call this message answers.
    tool_call_id: Optional[str] = None
    # Creation time in epoch seconds.
    timestamp: float = field(default_factory=time.time)
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
@dataclass
class SessionState:
    """Global session state for the fixed conversation"""
    # Always the module-level FIXED_CONVERSATION_ID; every request shares it.
    conversation_id: str = FIXED_CONVERSATION_ID
    # Most recently assigned task id, if any.
    active_task_id: Optional[str] = None
    # Ordered message history for the conversation.
    messages: List[SessionMessage] = field(default_factory=list)
    # NOTE(review): never set to True anywhere visible in this module — confirm intent.
    initialized: bool = False
    # Epoch seconds when the session object was created.
    created_at: float = field(default_factory=time.time)
    # Epoch seconds of the last message append.
    last_activity: float = field(default_factory=time.time)
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
class GlobalSessionManager:
    """
    Manages the global fixed session for Warp API.

    Holds a single SessionState keyed by the module-level fixed
    conversation id and exposes helpers to mirror an OpenAI-style message
    list into it. Not thread-safe: the asyncio.Lock created in __init__
    is never acquired by any method in this module.
    """

    def __init__(self):
        self._session = SessionState()
        # NOTE(review): lock is created but never used — confirm whether
        # initialization was meant to be guarded by it.
        self._initialization_lock = asyncio.Lock()
        logger.info(f"GlobalSessionManager initialized with fixed conversation_id: {FIXED_CONVERSATION_ID}")

    def get_fixed_conversation_id(self) -> str:
        """Return the constant conversation id shared by all requests."""
        return FIXED_CONVERSATION_ID

    def add_message_from_openai(self, role: str, content: str, tool_calls: Optional[List[Dict]] = None, tool_call_id: Optional[str] = None) -> str:
        """Append one message to the history and return its generated id."""
        message_id = f"msg-{uuid.uuid4().hex[:8]}"
        message = SessionMessage(
            id=message_id,
            role=role,
            content=content,
            tool_calls=tool_calls,
            tool_call_id=tool_call_id
        )

        self._session.messages.append(message)
        self._session.last_activity = time.time()

        logger.debug(f"Added {role} message to session: {content[:100]}...")
        return message_id

    def get_session_history(self) -> List[SessionMessage]:
        """Return a shallow copy of the full message history."""
        return self._session.messages.copy()

    def get_history_for_task_context(self) -> List[SessionMessage]:
        """Return the history used to build task context (currently same as full history)."""
        return self._session.messages.copy()

    def update_session_with_openai_messages(self, openai_messages: List[Dict[str, Any]]) -> None:
        """Replace the stored history with the given OpenAI-format messages.

        Messages with no content and no tool_calls are skipped, except
        role "tool" messages which are always kept.
        """
        self._session.messages.clear()
        for msg in openai_messages:
            role = msg.get("role", "")
            content = msg.get("content", "")
            tool_calls = msg.get("tool_calls")
            tool_call_id = msg.get("tool_call_id")
            if not content and not tool_calls and role != "tool":
                continue
            self.add_message_from_openai(role, content, tool_calls, tool_call_id)
        logger.debug(f"Updated session with {len(openai_messages)} OpenAI messages")

    def extract_current_user_query(self, openai_messages: List[Dict[str, Any]]) -> Optional[str]:
        """Return the content of the most recent "user" message, or None."""
        for msg in reversed(openai_messages):
            if msg.get("role") == "user":
                query = msg.get("content", "")
                logger.debug(f"Extracted current user query: {query[:100]}...")
                return query
        return None

    def get_history_messages_excluding_current(self, current_user_query: str) -> List[SessionMessage]:
        """Return the history minus any user message whose content equals the current query.

        NOTE(review): matches by exact content, so repeated identical user
        queries are all excluded — confirm that is acceptable.
        """
        history = []
        for msg in self._session.messages:
            if msg.role == "user" and msg.content == current_user_query:
                continue
            history.append(msg)
        logger.debug(f"Retrieved {len(history)} history messages (excluding current query)")
        return history

    def set_active_task_id(self, task_id: str) -> None:
        """Record the task id the session is currently working on."""
        self._session.active_task_id = task_id
        logger.debug(f"Set active task_id: {task_id}")

    def get_active_task_id(self) -> Optional[str]:
        """Return the currently active task id, if any."""
        return self._session.active_task_id

    def is_initialized(self) -> bool:
        """Return the session's initialized flag (never set True in this module)."""
        return self._session.initialized

    def get_stats(self) -> Dict[str, Any]:
        """Return a snapshot of session metadata for diagnostics."""
        return {
            "conversation_id": self._session.conversation_id,
            "initialized": self._session.initialized,
            "active_task_id": self._session.active_task_id,
            "message_count": len(self._session.messages),
            "created_at": self._session.created_at,
            "last_activity": self._session.last_activity
        }
|
| 124 |
+
|
| 125 |
+
|
| 126 |
+
# Global session manager instance
|
| 127 |
+
_global_session: Optional[GlobalSessionManager] = None
|
| 128 |
+
|
| 129 |
+
def get_global_session() -> GlobalSessionManager:
    """Return the process-wide session manager, creating it lazily on first use."""
    global _global_session
    manager = _global_session
    if manager is None:
        manager = GlobalSessionManager()
        _global_session = manager
    return manager
|
warp2protobuf/core/stream_processor.py
ADDED
|
@@ -0,0 +1,334 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/env python3
|
| 2 |
+
# -*- coding: utf-8 -*-
|
| 3 |
+
"""
|
| 4 |
+
流式数据包处理器
|
| 5 |
+
|
| 6 |
+
处理流式protobuf数据包,支持实时解析和WebSocket推送。
|
| 7 |
+
"""
|
| 8 |
+
import asyncio
|
| 9 |
+
import json
|
| 10 |
+
import base64
|
| 11 |
+
from typing import AsyncGenerator, List, Dict, Any, Optional
|
| 12 |
+
from datetime import datetime
|
| 13 |
+
|
| 14 |
+
from .logging import logger
|
| 15 |
+
from .protobuf_utils import protobuf_to_dict
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
class StreamProcessor:
    """Streaming protobuf packet processor.

    Owns the registry of active StreamSession objects keyed by stream id
    and forwards chunk/finalize operations to the matching session.
    """

    def __init__(self, websocket_manager=None):
        # Optional broadcaster handed to each session for push events.
        self.websocket_manager = websocket_manager
        # stream_id -> live session
        self.active_streams: Dict[str, StreamSession] = {}

    async def create_stream_session(self, stream_id: str, message_type: str = "warp.multi_agent.v1.Response") -> 'StreamSession':
        """Create, register and return a new session for *stream_id*."""
        session = StreamSession(stream_id, message_type, self.websocket_manager)
        self.active_streams[stream_id] = session

        logger.info(f"创建流式会话: {stream_id}, 消息类型: {message_type}")
        return session

    async def get_stream_session(self, stream_id: str) -> Optional['StreamSession']:
        """Look up an active session; None when the id is unknown."""
        return self.active_streams.get(stream_id)

    async def close_stream_session(self, stream_id: str):
        """Close and deregister a session if it exists (no-op otherwise)."""
        if stream_id in self.active_streams:
            session = self.active_streams[stream_id]
            await session.close()
            del self.active_streams[stream_id]
            logger.info(f"关闭流式会话: {stream_id}")

    async def process_stream_chunk(self, stream_id: str, chunk_data: bytes) -> Dict[str, Any]:
        """Feed one raw chunk to its session; raises ValueError for unknown ids."""
        session = await self.get_stream_session(stream_id)
        if not session:
            raise ValueError(f"流式会话不存在: {stream_id}")

        return await session.process_chunk(chunk_data)

    async def finalize_stream(self, stream_id: str) -> Dict[str, Any]:
        """Finish a stream: assemble the final result, then close the session."""
        session = await self.get_stream_session(stream_id)
        if not session:
            raise ValueError(f"流式会话不存在: {stream_id}")

        result = await session.finalize()
        await self.close_stream_session(stream_id)
        return result
|
| 62 |
+
|
| 63 |
+
|
| 64 |
+
class StreamSession:
    """One streaming parse session.

    Accumulates raw protobuf chunks, parses each chunk individually as it
    arrives, and on finalize() attempts to reassemble and parse the full
    concatenated message. Optionally broadcasts per-chunk and completion
    events through a websocket manager.
    """

    def __init__(self, session_id: str, message_type: str, websocket_manager=None):
        self.session_id = session_id
        # Fully-qualified protobuf message type used for parsing.
        self.message_type = message_type
        self.websocket_manager = websocket_manager

        # Raw chunks in arrival order.
        self.chunks: List[bytes] = []
        self.chunk_count = 0
        self.total_size = 0
        self.start_time = datetime.now()

        # Successfully parsed chunk dicts (failed chunks are not appended).
        self.parsed_chunks: List[Dict] = []
        self.complete_message: Optional[Dict] = None

    async def process_chunk(self, chunk_data: bytes) -> Dict[str, Any]:
        """Record one raw chunk, try to parse it, and return a result dict.

        The result always carries chunk_index/size/timestamp; on success it
        adds json_data, on failure an error string. Parse failures are
        reported, never raised.
        """
        self.chunk_count += 1
        self.total_size += len(chunk_data)
        self.chunks.append(chunk_data)

        logger.debug(f"流式会话 {self.session_id}: 处理数据块 {self.chunk_count}, 大小 {len(chunk_data)} 字节")

        chunk_result = {
            "chunk_index": self.chunk_count - 1,
            "size": len(chunk_data),
            "timestamp": datetime.now().isoformat()
        }

        try:
            chunk_json = protobuf_to_dict(chunk_data, self.message_type)
            chunk_result["json_data"] = chunk_json
            chunk_result["parsed_successfully"] = True

            self.parsed_chunks.append(chunk_json)

            if self.websocket_manager:
                await self.websocket_manager.broadcast({
                    "event": "stream_chunk_parsed",
                    "stream_id": self.session_id,
                    "chunk": chunk_result
                })

        except Exception as e:
            chunk_result["error"] = str(e)
            chunk_result["parsed_successfully"] = False
            logger.warning(f"数据块解析失败: {e}")

            if self.websocket_manager:
                await self.websocket_manager.broadcast({
                    "event": "stream_chunk_error",
                    "stream_id": self.session_id,
                    "chunk": chunk_result
                })

        return chunk_result

    async def finalize(self) -> Dict[str, Any]:
        """Finish the stream: summarize chunks and try to parse the joined bytes.

        Returns a report with per-chunk info and a complete_message entry
        (either the assembled JSON or the assembly error).
        """
        duration = (datetime.now() - self.start_time).total_seconds()

        logger.info(f"流式会话 {self.session_id} 完成: {self.chunk_count} 块, 总大小 {self.total_size} 字节, 耗时 {duration:.2f}s")

        result = {
            "session_id": self.session_id,
            "chunk_count": self.chunk_count,
            "total_size": self.total_size,
            "duration_seconds": duration,
            "chunks": []
        }

        for i, chunk in enumerate(self.chunks):
            chunk_info = {
                "index": i,
                "size": len(chunk),
                # First 32 bytes as hex for quick inspection.
                "hex_preview": chunk[:32].hex() if len(chunk) >= 32 else chunk.hex()
            }

            # NOTE(review): parsed_chunks only holds chunks that parsed
            # successfully, so if any earlier chunk failed this positional
            # pairing misattributes parsed data — confirm intended behavior.
            if i < len(self.parsed_chunks):
                chunk_info["parsed_data"] = self.parsed_chunks[i]

            result["chunks"].append(chunk_info)

        try:
            complete_data = b''.join(self.chunks)
            complete_json = protobuf_to_dict(complete_data, self.message_type)

            result["complete_message"] = {
                "size": len(complete_data),
                "json_data": complete_json,
                "assembly_successful": True
            }

            self.complete_message = complete_json

            logger.info(f"流式消息拼接成功: {len(complete_data)} 字节")

        except Exception as e:
            result["complete_message"] = {
                "error": str(e),
                "assembly_successful": False
            }
            logger.warning(f"流式消息拼接失败: {e}")

        if self.websocket_manager:
            await self.websocket_manager.broadcast({
                "event": "stream_completed",
                "stream_id": self.session_id,
                "result": result
            })

        return result

    async def close(self):
        """Drop all buffered data; the session object stays reusable but empty."""
        self.chunks.clear()
        self.parsed_chunks.clear()
        self.complete_message = None

        logger.debug(f"流式会话 {self.session_id} 已关闭")
|
| 185 |
+
|
| 186 |
+
|
| 187 |
+
class StreamPacketAnalyzer:
    """Static analysis helpers for streamed packet chunks.

    Provides size/pattern statistics over raw byte chunks and delta
    extraction over their parsed dict representations.
    """

    @staticmethod
    def analyze_chunk_patterns(chunks: List[bytes]) -> Dict[str, Any]:
        """Summarize chunk sizes, their distribution, and shared byte prefixes."""
        if not chunks:
            return {"error": "无数据块"}

        sizes = [len(c) for c in chunks]
        report: Dict[str, Any] = {
            "total_chunks": len(chunks),
            "size_distribution": {},
            "size_stats": {
                "min": min(sizes),
                "max": max(sizes),
                "avg": sum(sizes) / len(sizes),
                "total": sum(sizes),
            },
            "pattern_analysis": {},
        }

        # Histogram over fixed size buckets; the last bucket is open-ended.
        buckets = ((0, 100), (100, 500), (500, 1000), (1000, 5000), (5000, float("inf")))
        for lo, hi in buckets:
            label = f"{lo}-{hi if hi != float('inf') else '∞'}"
            report["size_distribution"][label] = sum(1 for s in sizes if lo <= s < hi)

        if len(chunks) >= 2:
            pattern = report["pattern_analysis"]
            # Sample the first 4 bytes of up to five chunks as hex.
            pattern["first_bytes_samples"] = [
                c[:4].hex() if len(c) >= 4 else c.hex() for c in chunks[:5]
            ]

            # Longest byte prefix (capped at 10) shared by every chunk.
            head = chunks[0]
            prefix_len = 0
            for idx in range(min(len(head), 10)):
                if any(len(c) <= idx or c[idx] != head[idx] for c in chunks[1:]):
                    break
                prefix_len = idx + 1

            if prefix_len > 0:
                pattern["common_prefix_length"] = prefix_len
                pattern["common_prefix_hex"] = head[:prefix_len].hex()

        return report

    @staticmethod
    def extract_streaming_deltas(parsed_chunks: List[Dict]) -> List[Dict]:
        """Compute per-chunk text deltas and field-level changes between chunks."""
        if not parsed_chunks:
            return []

        deltas: List[Dict] = []
        last_text = ""

        for idx, parsed in enumerate(parsed_chunks):
            entry: Dict[str, Any] = {
                "chunk_index": idx,
                "timestamp": datetime.now().isoformat(),
            }

            text = StreamPacketAnalyzer._extract_text_content(parsed)

            if text and text != last_text:
                # An extension of the previous text is an append; anything
                # else is treated as a full replacement.
                if last_text and text.startswith(last_text):
                    entry["content_delta"] = text[len(last_text):]
                    entry["delta_type"] = "append"
                else:
                    entry["content_delta"] = text
                    entry["delta_type"] = "replace"

                entry["total_content_length"] = len(text)
                last_text = text
            else:
                entry["content_delta"] = ""
                entry["delta_type"] = "no_change"

            if idx > 0:
                entry["field_changes"] = StreamPacketAnalyzer._compare_dicts(
                    parsed_chunks[idx - 1], parsed
                )

            deltas.append(entry)

        return deltas

    @staticmethod
    def _extract_text_content(data: Dict) -> str:
        """Return the first string found at any known text location, else ''."""
        candidate_paths = (
            ["content"],
            ["text"],
            ["message"],
            ["agent_output", "text"],
            ["choices", 0, "delta", "content"],
            ["choices", 0, "message", "content"],
        )

        for path in candidate_paths:
            node: Any = data
            reached = True
            for step in path:
                if isinstance(node, dict) and step in node:
                    node = node[step]
                elif isinstance(node, list) and isinstance(step, int) and 0 <= step < len(node):
                    node = node[step]
                else:
                    reached = False
                    break
            if reached and isinstance(node, str):
                return node

        return ""

    @staticmethod
    def _compare_dicts(dict1: Dict, dict2: Dict, prefix: str = "") -> List[str]:
        """Describe added/removed/modified keys between two dicts (max 10 entries)."""
        changes: List[str] = []

        for key in dict1.keys() | dict2.keys():
            path = f"{prefix}.{key}" if prefix else key

            if key not in dict1:
                changes.append(f"添加: {path}")
            elif key not in dict2:
                changes.append(f"删除: {path}")
            elif dict1[key] != dict2[key]:
                # Recurse into nested dicts so changes carry a dotted path.
                if isinstance(dict1[key], dict) and isinstance(dict2[key], dict):
                    changes.extend(
                        StreamPacketAnalyzer._compare_dicts(dict1[key], dict2[key], path)
                    )
                else:
                    changes.append(f"修改: {path}")

        return changes[:10]
|
| 321 |
+
|
| 322 |
+
|
| 323 |
+
# Lazily-created process-wide StreamProcessor singleton.
_global_processor: Optional[StreamProcessor] = None


def get_stream_processor() -> StreamProcessor:
    """Return the global StreamProcessor, creating it on first use."""
    global _global_processor
    if _global_processor is not None:
        return _global_processor
    _global_processor = StreamProcessor()
    return _global_processor
|
| 330 |
+
|
| 331 |
+
|
| 332 |
+
def set_websocket_manager(manager):
    """Attach *manager* to the global stream processor for event broadcasting."""
    get_stream_processor().websocket_manager = manager
|
warp2protobuf/warp/__init__.py
ADDED
|
@@ -0,0 +1,2 @@
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Subpackage for Warp API client integrations
|
| 2 |
+
__all__ = []
|
warp2protobuf/warp/api_client.py
ADDED
|
@@ -0,0 +1,426 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/env python3
|
| 2 |
+
# -*- coding: utf-8 -*-
|
| 3 |
+
"""
|
| 4 |
+
Warp API客户端模块
|
| 5 |
+
|
| 6 |
+
处理与Warp API的通信,包括protobuf数据发送和SSE响应解析。
|
| 7 |
+
"""
|
| 8 |
+
import httpx
|
| 9 |
+
import os
|
| 10 |
+
import base64
|
| 11 |
+
import binascii
|
| 12 |
+
from typing import Optional, Any, Dict
|
| 13 |
+
from urllib.parse import urlparse
|
| 14 |
+
import socket
|
| 15 |
+
|
| 16 |
+
from ..core.logging import logger
|
| 17 |
+
from ..core.protobuf_utils import protobuf_to_dict
|
| 18 |
+
from ..core.auth import get_valid_jwt, acquire_anonymous_access_token
|
| 19 |
+
from ..config.settings import WARP_URL as CONFIG_WARP_URL
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
def _get(d: Dict[str, Any], *names: str) -> Any:
|
| 23 |
+
"""Return the first matching key value (camelCase/snake_case tolerant)."""
|
| 24 |
+
for name in names:
|
| 25 |
+
if name in d:
|
| 26 |
+
return d[name]
|
| 27 |
+
return None
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
def _get_event_type(event_data: dict) -> str:
|
| 31 |
+
"""Determine the type of SSE event for logging"""
|
| 32 |
+
if "init" in event_data:
|
| 33 |
+
return "INITIALIZATION"
|
| 34 |
+
client_actions = _get(event_data, "client_actions", "clientActions")
|
| 35 |
+
if isinstance(client_actions, dict):
|
| 36 |
+
actions = _get(client_actions, "actions", "Actions") or []
|
| 37 |
+
if not actions:
|
| 38 |
+
return "CLIENT_ACTIONS_EMPTY"
|
| 39 |
+
|
| 40 |
+
action_types = []
|
| 41 |
+
for action in actions:
|
| 42 |
+
if _get(action, "create_task", "createTask") is not None:
|
| 43 |
+
action_types.append("CREATE_TASK")
|
| 44 |
+
elif _get(action, "append_to_message_content", "appendToMessageContent") is not None:
|
| 45 |
+
action_types.append("APPEND_CONTENT")
|
| 46 |
+
elif _get(action, "add_messages_to_task", "addMessagesToTask") is not None:
|
| 47 |
+
action_types.append("ADD_MESSAGE")
|
| 48 |
+
elif _get(action, "tool_call", "toolCall") is not None:
|
| 49 |
+
action_types.append("TOOL_CALL")
|
| 50 |
+
elif _get(action, "tool_response", "toolResponse") is not None:
|
| 51 |
+
action_types.append("TOOL_RESPONSE")
|
| 52 |
+
else:
|
| 53 |
+
action_types.append("UNKNOWN_ACTION")
|
| 54 |
+
|
| 55 |
+
return f"CLIENT_ACTIONS({', '.join(action_types)})"
|
| 56 |
+
elif "finished" in event_data:
|
| 57 |
+
return "FINISHED"
|
| 58 |
+
else:
|
| 59 |
+
return "UNKNOWN_EVENT"
|
| 60 |
+
|
| 61 |
+
|
| 62 |
+
async def send_protobuf_to_warp_api(
    protobuf_bytes: bytes, show_all_events: bool = True
) -> tuple[str, Optional[str], Optional[str]]:
    """Send protobuf data to the Warp API and collect the streamed response.

    Args:
        protobuf_bytes: Encoded request payload (parsed server-side as
            ``warp.multi_agent.v1`` messages; see protobuf_to_dict call below).
        show_all_events: When True, log every SSE event and keep a copy in an
            in-memory list (the list is currently not returned to the caller).

    Returns:
        Tuple of (full response text, conversation_id, task_id). On an HTTP
        error a human-readable error string is returned with both ids None.

    Raises:
        Exception: re-raises any transport/parsing exception after logging it.
    """
    try:
        logger.info(f"发送 {len(protobuf_bytes)} 字节到Warp API")
        logger.info(f"数据包前32字节 (hex): {protobuf_bytes[:32].hex()}")

        warp_url = CONFIG_WARP_URL

        logger.info(f"发送请求到: {warp_url}")

        conversation_id = None
        task_id = None
        complete_response = []  # accumulated text fragments, joined at the end
        all_events = []  # raw parsed events (only populated when show_all_events)
        event_count = 0

        # TLS verification may be disabled via env var (debugging/proxying only).
        verify_opt = True
        insecure_env = os.getenv("WARP_INSECURE_TLS", "").lower()
        if insecure_env in ("1", "true", "yes"):
            verify_opt = False
            logger.warning("TLS verification disabled via WARP_INSECURE_TLS for Warp API client")

        async with httpx.AsyncClient(http2=True, timeout=httpx.Timeout(60.0), verify=verify_opt, trust_env=True) as client:
            # At most two attempts: if the first fails with a quota 429, an
            # anonymous token is acquired and the request retried once.
            for attempt in range(2):
                jwt = await get_valid_jwt() if attempt == 0 else jwt  # keep existing unless refreshed explicitly
                headers = {
                    "accept": "text/event-stream",
                    "content-type": "application/x-protobuf",
                    "x-warp-client-version": "v0.2025.08.06.08.12.stable_02",
                    "x-warp-os-category": "Windows",
                    "x-warp-os-name": "Windows",
                    "x-warp-os-version": "11 (26100)",
                    "authorization": f"Bearer {jwt}",
                    # NOTE(review): content-length is normally set by httpx;
                    # kept explicit here — confirm it matches server expectations.
                    "content-length": str(len(protobuf_bytes)),
                }
                async with client.stream("POST", warp_url, headers=headers, content=protobuf_bytes) as response:
                    if response.status_code != 200:
                        error_text = await response.aread()
                        error_content = error_text.decode('utf-8') if error_text else "No error content"
                        # Detect quota exhaustion; on the first attempt try to
                        # obtain an anonymous token and retry.
                        if response.status_code == 429 and attempt == 0 and (
                            ("No remaining quota" in error_content) or ("No AI requests remaining" in error_content)
                        ):
                            logger.warning("WARP API 返回 429 (配额用尽)。尝试申请匿名token并重试一次…")
                            try:
                                new_jwt = await acquire_anonymous_access_token()
                            except Exception:
                                new_jwt = None
                            if new_jwt:
                                jwt = new_jwt
                                # Abandon this response; continue to the next attempt.
                                continue
                            else:
                                logger.error("匿名token申请失败,无法重试。")
                                logger.error(f"WARP API HTTP ERROR {response.status_code}: {error_content}")
                                return f"❌ Warp API Error (HTTP {response.status_code}): {error_content}", None, None
                        # Any other error, or a failure on the retry attempt.
                        logger.error(f"WARP API HTTP ERROR {response.status_code}: {error_content}")
                        return f"❌ Warp API Error (HTTP {response.status_code}): {error_content}", None, None

                    logger.info(f"✅ 收到HTTP {response.status_code}响应")
                    logger.info("开始处理SSE事件流...")

                    import re as _re
                    def _parse_payload_bytes(data_str: str):
                        # Decode an accumulated SSE data block: try hex first,
                        # then urlsafe base64, then standard base64.
                        s = _re.sub(r"\s+", "", data_str or "")
                        if not s:
                            return None
                        if _re.fullmatch(r"[0-9a-fA-F]+", s or ""):
                            try:
                                return bytes.fromhex(s)
                            except Exception:
                                pass
                        pad = "=" * ((4 - (len(s) % 4)) % 4)
                        try:
                            import base64 as _b64
                            return _b64.urlsafe_b64decode(s + pad)
                        except Exception:
                            try:
                                return _b64.b64decode(s + pad)
                            except Exception:
                                return None

                    current_data = ""

                    async for line in response.aiter_lines():
                        if line.startswith("data:"):
                            payload = line[5:].strip()
                            if not payload:
                                continue
                            if payload == "[DONE]":
                                logger.info("收到[DONE]标记,结束处理")
                                break
                            # Events can span several data: lines; buffer until
                            # the blank separator line arrives.
                            current_data += payload
                            continue

                        if (line.strip() == "") and current_data:
                            raw_bytes = _parse_payload_bytes(current_data)
                            current_data = ""
                            if raw_bytes is None:
                                logger.debug("跳过无法解析的SSE数据块(非hex/base64或不完整)")
                                continue
                            try:
                                event_data = protobuf_to_dict(raw_bytes, "warp.multi_agent.v1.ResponseEvent")
                            except Exception as parse_error:
                                logger.debug(f"解析事件失败,跳过: {str(parse_error)[:100]}")
                                continue
                            event_count += 1

                            # Local camelCase/snake_case tolerant lookup; shadows
                            # the module-level _get but adds an isinstance guard.
                            def _get(d: Dict[str, Any], *names: str) -> Any:
                                for n in names:
                                    if isinstance(d, dict) and n in d:
                                        return d[n]
                                return None

                            event_type = _get_event_type(event_data)
                            if show_all_events:
                                all_events.append({"event_number": event_count, "event_type": event_type, "raw_data": event_data})
                            logger.info(f"🔄 Event #{event_count}: {event_type}")
                            if show_all_events:
                                logger.info(f" 📋 Event data: {str(event_data)}...")

                            if "init" in event_data:
                                init_data = event_data["init"]
                                conversation_id = init_data.get("conversation_id", conversation_id)
                                task_id = init_data.get("task_id", task_id)
                                logger.info(f"会话初始化: {conversation_id}")
                            client_actions = _get(event_data, "client_actions", "clientActions")
                            if isinstance(client_actions, dict):
                                actions = _get(client_actions, "actions", "Actions") or []
                                for i, action in enumerate(actions):
                                    logger.info(f" 🎯 Action #{i+1}: {list(action.keys())}")
                                    # Incremental text fragments appended to a message.
                                    append_data = _get(action, "append_to_message_content", "appendToMessageContent")
                                    if isinstance(append_data, dict):
                                        message = append_data.get("message", {})
                                        agent_output = _get(message, "agent_output", "agentOutput") or {}
                                        text_content = agent_output.get("text", "")
                                        if text_content:
                                            complete_response.append(text_content)
                                            logger.info(f" 📝 Text Fragment: {text_content[:100]}...")
                                    # Complete messages added to the task in one action.
                                    messages_data = _get(action, "add_messages_to_task", "addMessagesToTask")
                                    if isinstance(messages_data, dict):
                                        messages = messages_data.get("messages", [])
                                        task_id = messages_data.get("task_id", messages_data.get("taskId", task_id))
                                        for j, message in enumerate(messages):
                                            logger.info(f" 📨 Message #{j+1}: {list(message.keys())}")
                                            if _get(message, "agent_output", "agentOutput") is not None:
                                                agent_output = _get(message, "agent_output", "agentOutput") or {}
                                                text_content = agent_output.get("text", "")
                                                if text_content:
                                                    complete_response.append(text_content)
                                                    logger.info(f" 📝 Complete Message: {text_content[:100]}...")

                    full_response = "".join(complete_response)
                    logger.info("="*60)
                    logger.info("📊 SSE STREAM SUMMARY")
                    logger.info("="*60)
                    logger.info(f"📈 Total Events Processed: {event_count}")
                    logger.info(f"🆔 Conversation ID: {conversation_id}")
                    logger.info(f"🆔 Task ID: {task_id}")
                    logger.info(f"📝 Response Length: {len(full_response)} characters")
                    logger.info("="*60)
                    if full_response:
                        logger.info(f"✅ Stream processing completed successfully")
                        return full_response, conversation_id, task_id
                    else:
                        logger.warning("⚠️ No text content received in response")
                        return "Warning: No response content received", conversation_id, task_id
    except Exception as e:
        import traceback
        logger.error("="*60)
        logger.error("WARP API CLIENT EXCEPTION")
        logger.error("="*60)
        logger.error(f"Exception Type: {type(e).__name__}")
        logger.error(f"Exception Message: {str(e)}")
        logger.error(f"Request URL: {warp_url if 'warp_url' in locals() else 'Unknown'}")
        logger.error(f"Request Size: {len(protobuf_bytes) if 'protobuf_bytes' in locals() else 'Unknown'}")
        logger.error("Python Traceback:")
        logger.error(traceback.format_exc())
        logger.error("="*60)
        raise
|
| 246 |
+
|
| 247 |
+
|
| 248 |
+
async def send_protobuf_to_warp_api_parsed(protobuf_bytes: bytes) -> tuple[str, Optional[str], Optional[str], list]:
    """Send protobuf data to the Warp API and return parsed SSE event data.

    Same flow as ``send_protobuf_to_warp_api``, but every successfully decoded
    event is retained and returned to the caller.

    Args:
        protobuf_bytes: Encoded request payload.

    Returns:
        Tuple of (full response text, conversation_id, task_id, parsed_events),
        where parsed_events is a list of dicts with keys ``event_number``,
        ``event_type`` and ``parsed_data``. On an HTTP error an error string is
        returned with both ids None and an empty event list.

    Raises:
        Exception: re-raises any transport/parsing exception after logging it.
    """
    try:
        logger.info(f"发送 {len(protobuf_bytes)} 字节到Warp API (解析模式)")
        logger.info(f"数据包前32字节 (hex): {protobuf_bytes[:32].hex()}")

        warp_url = CONFIG_WARP_URL

        logger.info(f"发送请求到: {warp_url}")

        conversation_id = None
        task_id = None
        complete_response = []  # accumulated text fragments
        parsed_events = []  # every successfully decoded SSE event
        event_count = 0

        # TLS verification may be disabled via env var (debugging/proxying only).
        verify_opt = True
        insecure_env = os.getenv("WARP_INSECURE_TLS", "").lower()
        if insecure_env in ("1", "true", "yes"):
            verify_opt = False
            logger.warning("TLS verification disabled via WARP_INSECURE_TLS for Warp API client")

        async with httpx.AsyncClient(http2=True, timeout=httpx.Timeout(60.0), verify=verify_opt, trust_env=True) as client:
            # At most two attempts: on a first-attempt quota 429, acquire an
            # anonymous token and retry once.
            for attempt in range(2):
                jwt = await get_valid_jwt() if attempt == 0 else jwt  # keep existing unless refreshed explicitly
                headers = {
                    "accept": "text/event-stream",
                    "content-type": "application/x-protobuf",
                    "x-warp-client-version": "v0.2025.08.06.08.12.stable_02",
                    "x-warp-os-category": "Windows",
                    "x-warp-os-name": "Windows",
                    "x-warp-os-version": "11 (26100)",
                    "authorization": f"Bearer {jwt}",
                    "content-length": str(len(protobuf_bytes)),
                }
                async with client.stream("POST", warp_url, headers=headers, content=protobuf_bytes) as response:
                    if response.status_code != 200:
                        error_text = await response.aread()
                        error_content = error_text.decode('utf-8') if error_text else "No error content"
                        # Detect quota exhaustion; on the first attempt try an
                        # anonymous token and retry.
                        if response.status_code == 429 and attempt == 0 and (
                            ("No remaining quota" in error_content) or ("No AI requests remaining" in error_content)
                        ):
                            logger.warning("WARP API 返回 429 (配额用尽, 解析模式)。尝试申请匿名token并重试一次…")
                            try:
                                new_jwt = await acquire_anonymous_access_token()
                            except Exception:
                                new_jwt = None
                            if new_jwt:
                                jwt = new_jwt
                                # Abandon this response; continue to the next attempt.
                                continue
                            else:
                                logger.error("匿名token申请失败,无法重试 (解析模式)。")
                                logger.error(f"WARP API HTTP ERROR (解析模式) {response.status_code}: {error_content}")
                                return f"❌ Warp API Error (HTTP {response.status_code}): {error_content}", None, None, []
                        # Any other error, or a failure on the retry attempt.
                        logger.error(f"WARP API HTTP ERROR (解析模式) {response.status_code}: {error_content}")
                        return f"❌ Warp API Error (HTTP {response.status_code}): {error_content}", None, None, []

                    logger.info(f"✅ 收到HTTP {response.status_code}响应 (解析模式)")
                    logger.info("开始处理SSE事件流...")

                    import re as _re2
                    def _parse_payload_bytes2(data_str: str):
                        # Decode an accumulated SSE data block: hex first, then
                        # urlsafe base64, then standard base64.
                        s = _re2.sub(r"\s+", "", data_str or "")
                        if not s:
                            return None
                        if _re2.fullmatch(r"[0-9a-fA-F]+", s or ""):
                            try:
                                return bytes.fromhex(s)
                            except Exception:
                                pass
                        pad = "=" * ((4 - (len(s) % 4)) % 4)
                        try:
                            import base64 as _b642
                            return _b642.urlsafe_b64decode(s + pad)
                        except Exception:
                            try:
                                return _b642.b64decode(s + pad)
                            except Exception:
                                return None

                    current_data = ""

                    async for line in response.aiter_lines():
                        if line.startswith("data:"):
                            payload = line[5:].strip()
                            if not payload:
                                continue
                            if payload == "[DONE]":
                                logger.info("收到[DONE]标记,结束处理")
                                break
                            # Events can span several data: lines; buffer until
                            # the blank separator line arrives.
                            current_data += payload
                            continue

                        if (line.strip() == "") and current_data:
                            raw_bytes = _parse_payload_bytes2(current_data)
                            current_data = ""
                            if raw_bytes is None:
                                logger.debug("跳过无法解析的SSE数据块(非hex/base64或不完整)")
                                continue
                            try:
                                event_data = protobuf_to_dict(raw_bytes, "warp.multi_agent.v1.ResponseEvent")
                                event_count += 1
                                event_type = _get_event_type(event_data)
                                parsed_event = {"event_number": event_count, "event_type": event_type, "parsed_data": event_data}
                                parsed_events.append(parsed_event)
                                logger.info(f"🔄 Event #{event_count}: {event_type}")
                                logger.debug(f" 📋 Event data: {str(event_data)}...")

                                # Local camelCase/snake_case tolerant lookup; shadows
                                # the module-level _get but adds an isinstance guard.
                                def _get(d: Dict[str, Any], *names: str) -> Any:
                                    for n in names:
                                        if isinstance(d, dict) and n in d:
                                            return d[n]
                                    return None

                                if "init" in event_data:
                                    init_data = event_data["init"]
                                    conversation_id = init_data.get("conversation_id", conversation_id)
                                    task_id = init_data.get("task_id", task_id)
                                    logger.info(f"会话初始化: {conversation_id}")

                                client_actions = _get(event_data, "client_actions", "clientActions")
                                if isinstance(client_actions, dict):
                                    actions = _get(client_actions, "actions", "Actions") or []
                                    for i, action in enumerate(actions):
                                        logger.info(f" 🎯 Action #{i+1}: {list(action.keys())}")
                                        # Incremental text fragments appended to a message.
                                        append_data = _get(action, "append_to_message_content", "appendToMessageContent")
                                        if isinstance(append_data, dict):
                                            message = append_data.get("message", {})
                                            agent_output = _get(message, "agent_output", "agentOutput") or {}
                                            text_content = agent_output.get("text", "")
                                            if text_content:
                                                complete_response.append(text_content)
                                                logger.info(f" 📝 Text Fragment: {text_content[:100]}...")
                                        # Complete messages added to the task in one action.
                                        messages_data = _get(action, "add_messages_to_task", "addMessagesToTask")
                                        if isinstance(messages_data, dict):
                                            messages = messages_data.get("messages", [])
                                            task_id = messages_data.get("task_id", messages_data.get("taskId", task_id))
                                            for j, message in enumerate(messages):
                                                logger.info(f" 📨 Message #{j+1}: {list(message.keys())}")
                                                if _get(message, "agent_output", "agentOutput") is not None:
                                                    agent_output = _get(message, "agent_output", "agentOutput") or {}
                                                    text_content = agent_output.get("text", "")
                                                    if text_content:
                                                        complete_response.append(text_content)
                                                        logger.info(f" 📝 Complete Message: {text_content[:100]}...")
                            except Exception as parse_err:
                                logger.debug(f"解析事件失败,跳过: {str(parse_err)[:100]}")
                                continue

                    full_response = "".join(complete_response)
                    logger.info("="*60)
                    logger.info("📊 SSE STREAM SUMMARY (解析模式)")
                    logger.info("="*60)
                    logger.info(f"📈 Total Events Processed: {event_count}")
                    logger.info(f"🆔 Conversation ID: {conversation_id}")
                    logger.info(f"🆔 Task ID: {task_id}")
                    logger.info(f"📝 Response Length: {len(full_response)} characters")
                    logger.info(f"🎯 Parsed Events Count: {len(parsed_events)}")
                    logger.info("="*60)

                    logger.info(f"✅ Stream processing completed successfully (解析模式)")
                    return full_response, conversation_id, task_id, parsed_events
    except Exception as e:
        import traceback
        logger.error("="*60)
        logger.error("WARP API CLIENT EXCEPTION (解析模式)")
        logger.error("="*60)
        logger.error(f"Exception Type: {type(e).__name__}")
        logger.error(f"Exception Message: {str(e)}")
        logger.error(f"Request URL: {warp_url if 'warp_url' in locals() else 'Unknown'}")
        logger.error(f"Request Size: {len(protobuf_bytes) if 'protobuf_bytes' in locals() else 'Unknown'}")
        logger.error("Python Traceback:")
        logger.error(traceback.format_exc())
        logger.error("="*60)
        raise
|
warp2protobuf/warp/response.py
ADDED
|
@@ -0,0 +1,204 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/env python3
|
| 2 |
+
# -*- coding: utf-8 -*-
|
| 3 |
+
"""
|
| 4 |
+
Warp API response parsing
|
| 5 |
+
|
| 6 |
+
Handles parsing of protobuf responses and extraction of OpenAI-compatible content.
|
| 7 |
+
"""
|
| 8 |
+
from typing import Optional, Dict, List, Any
|
| 9 |
+
|
| 10 |
+
from ..core.logging import logger
|
| 11 |
+
from ..core.protobuf import ensure_proto_runtime, msg_cls
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
def extract_openai_content_from_response(payload: bytes) -> dict:
    """
    Extract OpenAI-compatible content from Warp API response payload.

    Parses *payload* as a ``warp.multi_agent.v1.ResponseEvent`` and walks its
    client actions, accumulating agent output text, reasoning, and tool calls.

    Returns:
        Dict with keys ``content``, ``tool_calls``, ``finish_reason`` and
        ``metadata`` (plus ``reasoning`` when the event carried any). On a
        parse failure ``content`` is None and ``finish_reason`` is "error".
    """
    if not payload:
        logger.debug("extract_openai_content_from_response: payload is empty")
        return {"content": None, "tool_calls": [], "finish_reason": None, "metadata": {}}

    logger.debug(f"extract_openai_content_from_response: processing payload of {len(payload)} bytes")

    # NOTE(review): dumping the entire payload as hex can be very large for big
    # responses; kept as-is since it is debug-level only.
    hex_dump = payload.hex()
    logger.debug(f"extract_openai_content_from_response: complete payload hex: {hex_dump}")

    try:
        ensure_proto_runtime()
        ResponseEvent = msg_cls("warp.multi_agent.v1.ResponseEvent")
        response = ResponseEvent()
        response.ParseFromString(payload)

        result = {"content": "", "tool_calls": [], "finish_reason": None, "metadata": {}}

        if response.HasField("client_actions"):
            for i, action in enumerate(response.client_actions.actions):
                # Streamed text/reasoning fragments appended to a message.
                if action.HasField("append_to_message_content"):
                    message = action.append_to_message_content.message
                    if message.HasField("agent_output"):
                        agent_output = message.agent_output
                        if agent_output.text:
                            result["content"] += agent_output.text
                        if agent_output.reasoning:
                            if "reasoning" not in result:
                                result["reasoning"] = ""
                            result["reasoning"] += agent_output.reasoning
                    if message.HasField("tool_call"):
                        # getattr fallbacks tolerate schema variants
                        # (name/function_name, arguments/parameters).
                        tool_call = message.tool_call
                        openai_tool_call = {
                            "id": getattr(tool_call, 'id', f"call_{i}"),
                            "type": "function",
                            "function": {
                                "name": getattr(tool_call, 'name', getattr(tool_call, 'function_name', 'unknown')),
                                "arguments": getattr(tool_call, 'arguments', getattr(tool_call, 'parameters', '{}'))
                            }
                        }
                        result["tool_calls"].append(openai_tool_call)
                elif action.HasField("add_messages_to_task"):
                    for j, msg in enumerate(action.add_messages_to_task.messages):
                        if msg.HasField("agent_output") and msg.agent_output.text:
                            result["content"] += msg.agent_output.text
                        if msg.HasField("tool_call"):
                            tool_call = msg.tool_call
                            tool_name = "unknown"
                            tool_args = "{}"
                            tool_call_id = getattr(tool_call, 'tool_call_id', f"call_{i}_{j}")
                            # The tool call is a oneof-style message: the first
                            # populated non-id field names the tool, and its
                            # subfields become the JSON arguments.
                            for field, value in tool_call.ListFields():
                                if field.name == 'tool_call_id':
                                    continue
                                tool_name = field.name
                                if hasattr(value, 'ListFields'):
                                    tool_fields_dict = {}
                                    for tool_field, tool_value in value.ListFields():
                                        if isinstance(tool_value, str):
                                            tool_fields_dict[tool_field.name] = tool_value
                                        elif hasattr(tool_value, '__len__') and not isinstance(tool_value, str):
                                            tool_fields_dict[tool_field.name] = list(tool_value)
                                        else:
                                            tool_fields_dict[tool_field.name] = str(tool_value)
                                    if tool_fields_dict:
                                        import json
                                        tool_args = json.dumps(tool_fields_dict)
                                break
                            openai_tool_call = {
                                "id": tool_call_id,
                                "type": "function",
                                "function": {"name": tool_name, "arguments": tool_args}
                            }
                            result["tool_calls"].append(openai_tool_call)
                elif action.HasField("update_task_message"):
                    umsg = action.update_task_message.message
                    if umsg.HasField("agent_output") and umsg.agent_output.text:
                        result["content"] += umsg.agent_output.text
                elif action.HasField("create_task"):
                    task = action.create_task.task
                    for j, msg in enumerate(task.messages):
                        if msg.HasField("agent_output") and msg.agent_output.text:
                            result["content"] += msg.agent_output.text
                elif action.HasField("update_task_summary"):
                    summary = action.update_task_summary.summary
                    if summary:
                        result["content"] += summary
        if response.HasField("finished"):
            result["finish_reason"] = "stop"
        result["metadata"] = {
            "response_fields": [field.name for field, _ in response.ListFields()],
            "has_client_actions": response.HasField("client_actions"),
            "payload_size": len(payload)
        }
        return result
    except Exception as e:
        logger.error(f"extract_openai_content_from_response: exception occurred: {e}")
        import traceback
        logger.error(f"extract_openai_content_from_response: traceback: {traceback.format_exc()}")
        return {"content": None, "tool_calls": [], "finish_reason": "error", "metadata": {"error": str(e)}}
|
| 116 |
+
|
| 117 |
+
|
| 118 |
+
def extract_text_from_response(payload: bytes) -> Optional[str]:
    """Return the assistant text accumulated from *payload*, or ``None``.

    Thin convenience wrapper around ``extract_openai_content_from_response``;
    an empty-string content is normalized to ``None`` as well.
    """
    extracted = extract_openai_content_from_response(payload)
    content = extracted["content"]
    # Falsy content ("" or None) collapses to None for callers.
    return content or None
|
| 121 |
+
|
| 122 |
+
|
| 123 |
+
def extract_openai_sse_deltas_from_response(payload: bytes) -> List[Dict[str, Any]]:
    """Translate one serialized ``warp.multi_agent.v1.ResponseEvent`` frame
    into a list of OpenAI ``chat.completion.chunk``-style delta payloads
    (the ``{"choices": [...]}`` dicts emitted on an SSE stream).

    Args:
        payload: Raw protobuf bytes for a single ResponseEvent.

    Returns:
        Possibly-empty list of delta dicts. Any parse/extraction failure is
        logged and swallowed, returning ``[]`` so the stream keeps flowing.
    """
    if not payload:
        return []

    import json  # hoisted out of the per-field loop; local to avoid touching file-level imports

    def _delta(delta: Dict[str, Any], finish_reason: Optional[str] = None) -> Dict[str, Any]:
        """Wrap a delta dict in the single-choice OpenAI chunk envelope."""
        return {"choices": [{"index": 0, "delta": delta, "finish_reason": finish_reason}]}

    def _tool_call_to_openai(tool_call, default_id: str) -> Dict[str, Any]:
        """Reflect over a warp tool_call message and build an OpenAI tool call.

        The concrete tool is carried as a sub-message whose field name becomes
        the OpenAI function name and whose scalar/repeated fields become the
        JSON arguments (assumes a `tool_call_id` field plus a oneof tool
        payload — matches the ListFields usage elsewhere in this module;
        TODO confirm against task.proto). Falls back to "unknown"/"{}" when
        no tool payload is set.
        """
        tool_call_id = getattr(tool_call, 'tool_call_id', default_id)
        tool_name = "unknown"
        tool_args = "{}"
        for field, value in tool_call.ListFields():
            if field.name == 'tool_call_id':
                continue
            tool_name = field.name
            if hasattr(value, 'ListFields'):
                fields_dict: Dict[str, Any] = {}
                for sub_field, sub_value in value.ListFields():
                    if isinstance(sub_value, str):
                        fields_dict[sub_field.name] = sub_value
                    elif hasattr(sub_value, '__len__'):
                        # repeated field -> JSON array
                        fields_dict[sub_field.name] = list(sub_value)
                    else:
                        # scalar/enum/message -> stringify for JSON safety
                        fields_dict[sub_field.name] = str(sub_value)
                if fields_dict:
                    tool_args = json.dumps(fields_dict)
            break  # only the first non-id field is the tool payload
        return {"id": tool_call_id, "type": "function",
                "function": {"name": tool_name, "arguments": tool_args}}

    try:
        ensure_proto_runtime()
        ResponseEvent = msg_cls("warp.multi_agent.v1.ResponseEvent")
        response = ResponseEvent()
        response.ParseFromString(payload)

        deltas: List[Dict[str, Any]] = []
        if response.HasField("client_actions"):
            for i, action in enumerate(response.client_actions.actions):
                if action.HasField("append_to_message_content"):
                    message = action.append_to_message_content.message
                    if message.HasField("agent_output"):
                        agent_output = message.agent_output
                        if agent_output.text:
                            deltas.append(_delta({"content": agent_output.text}))
                        if agent_output.reasoning:
                            deltas.append(_delta({"reasoning": agent_output.reasoning}))
                    if message.HasField("tool_call"):
                        # Announce the assistant role before the tool call delta.
                        deltas.append(_delta({"role": "assistant"}))
                        deltas.append(_delta({"tool_calls": [
                            _tool_call_to_openai(message.tool_call, f"call_{i}")
                        ]}))
                elif action.HasField("add_messages_to_task"):
                    for j, msg in enumerate(action.add_messages_to_task.messages):
                        if msg.HasField("agent_output") and msg.agent_output.text:
                            deltas.append(_delta({"content": msg.agent_output.text}))
                        if msg.HasField("tool_call"):
                            if j == 0:
                                # Role preamble only once per action.
                                deltas.append(_delta({"role": "assistant"}))
                            deltas.append(_delta({"tool_calls": [
                                _tool_call_to_openai(msg.tool_call, f"call_{i}_{j}")
                            ]}))
                elif action.HasField("update_task_message"):
                    umsg = action.update_task_message.message
                    if umsg.HasField("agent_output") and umsg.agent_output.text:
                        deltas.append(_delta({"content": umsg.agent_output.text}))
                elif action.HasField("create_task"):
                    for msg in action.create_task.task.messages:
                        if msg.HasField("agent_output") and msg.agent_output.text:
                            deltas.append(_delta({"content": msg.agent_output.text}))
                elif action.HasField("update_task_summary"):
                    summary = action.update_task_summary.summary
                    if summary:
                        deltas.append(_delta({"content": summary}))

        if response.HasField("finished"):
            # Terminal chunk: empty delta with an explicit finish reason.
            deltas.append(_delta({}, "stop"))
        return deltas
    except Exception as e:
        logger.error(f"extract_openai_sse_deltas_from_response: exception occurred: {e}")
        import traceback
        logger.error(f"extract_openai_sse_deltas_from_response: traceback: {traceback.format_exc()}")
        return []
|