bibibi12345 committed on
Commit
85c8196
·
1 Parent(s): 15d977b

added models endpoint

Browse files
Files changed (2) hide show
  1. README.md +5 -1
  2. main.py +24 -0
README.md CHANGED
@@ -55,4 +55,8 @@ The Space will build the Docker image and start the service. The API endpoint wi
55
  * **Method:** `POST`
56
  * **Request Body:** Send a JSON payload conforming to the OpenAI Chat Completions API schema (e.g., specifying `model`, `messages`, `stream`, etc.). The `model` field should correspond to a key in [`models.json`](models.json).
57
  * **Authentication:** Requests must include an `Authorization` header with your API key. Use the format `Bearer your_api_key`. For example, if using the default key, the header would be `Authorization: Bearer 123456`.
58
- * **Response:** The API will return either a standard JSON response or a server-sent event stream, mimicking the OpenAI API behavior based on the `stream` parameter in the request.
 
 
 
 
 
55
  * **Method:** `POST`
56
  * **Request Body:** Send a JSON payload conforming to the OpenAI Chat Completions API schema (e.g., specifying `model`, `messages`, `stream`, etc.). The `model` field should correspond to a key in [`models.json`](models.json).
57
  * **Authentication:** Requests must include an `Authorization` header with your API key. Use the format `Bearer your_api_key`. For example, if using the default key, the header would be `Authorization: Bearer 123456`.
58
+ * **Response:** The API will return either a standard JSON response or a server-sent event stream, mimicking the OpenAI API behavior based on the `stream` parameter in the request.
59
+ * **URL:** `/v1/models`
60
+ * **Method:** `GET`
61
+ * **Description:** Returns a list of available models supported by the proxy, based on the configuration in [`models.json`](models.json).
62
+ * **Authentication:** Requires the same `Authorization: Bearer <API_KEY>` header as the chat completions endpoint.
main.py CHANGED
@@ -62,6 +62,17 @@ class FlowithRequest(BaseModel):
62
  stream: bool
63
  nodeId: str # UUID for Flowith
64
 
 
 
 
 
 
 
 
 
 
 
 
65
  # --- FastAPI App ---
66
  app = FastAPI(
67
  title="OpenAI to Flowith Proxy",
@@ -245,6 +256,19 @@ async def chat_completions(
245
  raise HTTPException(status_code=500, detail=f"Internal server error: {exc}")
246
 
247
 
 
 
 
 
 
 
 
 
 
 
 
 
 
248
  # --- Optional: Add a root endpoint for health check ---
249
  @app.get("/")
250
  async def root():
 
62
  stream: bool
63
  nodeId: str # UUID for Flowith
64
 
65
+
66
# --- OpenAI Models Endpoint Models ---
class ModelCard(BaseModel):
    """One entry in the OpenAI-style model list.

    Mirrors the OpenAI ``/v1/models`` item schema: an ``id`` plus the
    fixed ``object`` discriminator. ``owned_by`` is part of the OpenAI
    model-object schema and is included so standard OpenAI clients that
    read it do not break; the value here is a placeholder.
    """

    id: str                 # model key from models.json
    object: str = "model"   # OpenAI discriminator; always "model"
    owned_by: str = "user"  # placeholder owner, per OpenAI model schema


class ModelList(BaseModel):
    """OpenAI-style wrapper for a list of ModelCard entries."""

    object: str = "list"  # OpenAI discriminator; always "list"
    data: List[ModelCard]  # the models available through this proxy

76
  # --- FastAPI App ---
77
  app = FastAPI(
78
  title="OpenAI to Flowith Proxy",
 
256
  raise HTTPException(status_code=500, detail=f"Internal server error: {exc}")
257
 
258
 
259
# --- Models Endpoint ---
@app.get("/v1/models", response_model=ModelList)
async def list_models(api_key: str = Depends(verify_api_key)):  # Protect with existing auth
    """Return the models available through this proxy.

    Mirrors the OpenAI ``GET /v1/models`` response shape. The model ids
    are the keys of the ``model_mappings`` dict (loaded from
    ``models.json``); the endpoint is protected by the same API-key
    dependency as the chat completions endpoint.
    """
    cards = [ModelCard(id=name) for name in model_mappings]
    return ModelList(data=cards)
270
+
271
+
272
  # --- Optional: Add a root endpoint for health check ---
273
  @app.get("/")
274
  async def root():