cevheri commited on
Commit
90898ed
·
1 Parent(s): 662abb5

style: formatted code with ruff

Browse files
.env.sample CHANGED
@@ -2,11 +2,14 @@ LOG_LEVEL=DEBUG
2
  AUTH_USERNAME=admin
3
  AUTH_PASSWORD=admin
4
 
5
- USE_MOCK="true"
6
  SECRET_KEY="1234"
7
- API_KEY="sk-lokumai=="
8
  BASE_URL="http://0.0.0.0:7860"
9
 
 
 
 
 
10
  # mongodb, embedded
11
  DB_DATABASE_TYPE=embedded
12
 
 
2
  AUTH_USERNAME=admin
3
  AUTH_PASSWORD=admin
4
 
 
5
  SECRET_KEY="1234"
6
+ API_KEY="sk-admin=="
7
  BASE_URL="http://0.0.0.0:7860"
8
 
9
+
10
+ #deprecated
11
+ USE_MOCK="true"
12
+
13
  # mongodb, embedded
14
  DB_DATABASE_TYPE=embedded
15
 
app/api/chat_api.py CHANGED
@@ -55,7 +55,6 @@ async def create_chat_completion(
55
 
56
  # get all chat completions
57
  @router.get("/chat/completions", response_model=List[ChatCompletionResponse])
58
- @api_response()
59
  async def list_chat_completions(
60
  request: Request,
61
  username: str = Depends(auth_service.verify_credentials),
@@ -64,7 +63,7 @@ async def list_chat_completions(
64
  Get all chat completions
65
  Summary: First load the chat interface(UI) for list of chat completions on the left side.
66
  """
67
-
68
  page: int = 0
69
  limit: int = 10
70
  sort: dict = {"created_date": -1}
 
55
 
56
  # get all chat completions
57
  @router.get("/chat/completions", response_model=List[ChatCompletionResponse])
 
58
  async def list_chat_completions(
59
  request: Request,
60
  username: str = Depends(auth_service.verify_credentials),
 
63
  Get all chat completions
64
  Summary: First load the chat interface(UI) for list of chat completions on the left side.
65
  """
66
+ logger.debug(f"BEGIN API: list_chat_completions for username: {username}")
67
  page: int = 0
68
  limit: int = 10
69
  sort: dict = {"created_date": -1}
app/core/initial_setup/data/initial_chat_completions.json CHANGED
@@ -7,7 +7,7 @@
7
  "object": "chat.completion",
8
  "is_archived": false,
9
  "is_starred": false,
10
- "created_by": "system",
11
  "created_date": "2024-03-20T10:00:00Z",
12
  "last_updated_by": "system",
13
  "last_updated_date": "2024-03-20T11:00:00Z",
@@ -46,7 +46,7 @@
46
  "object": "chat.completion",
47
  "is_archived": false,
48
  "is_starred": false,
49
- "created_by": "system",
50
  "created_date": "2024-03-21T10:00:00Z",
51
  "last_updated_by": "system",
52
  "last_updated_date": "2024-03-21T11:00:00Z",
@@ -85,7 +85,7 @@
85
  "object": "chat.completion",
86
  "is_archived": false,
87
  "is_starred": false,
88
- "created_by": "system",
89
  "created_date": "2024-03-22T10:00:00Z",
90
  "last_updated_by": "system",
91
  "last_updated_date": "2024-03-22T11:00:00Z",
 
7
  "object": "chat.completion",
8
  "is_archived": false,
9
  "is_starred": false,
10
+ "created_by": "admin",
11
  "created_date": "2024-03-20T10:00:00Z",
12
  "last_updated_by": "system",
13
  "last_updated_date": "2024-03-20T11:00:00Z",
 
46
  "object": "chat.completion",
47
  "is_archived": false,
48
  "is_starred": false,
49
+ "created_by": "admin",
50
  "created_date": "2024-03-21T10:00:00Z",
51
  "last_updated_by": "system",
52
  "last_updated_date": "2024-03-21T11:00:00Z",
 
85
  "object": "chat.completion",
86
  "is_archived": false,
87
  "is_starred": false,
88
+ "created_by": "admin",
89
  "created_date": "2024-03-22T10:00:00Z",
90
  "last_updated_by": "system",
91
  "last_updated_date": "2024-03-22T11:00:00Z",
app/core/initial_setup/setup.py CHANGED
@@ -7,7 +7,7 @@ from app.repository.chat_repository import ChatRepository
7
  from app.config.db import db_config
8
 
9
  class InitialSetup:
10
- """Initial setup manager for the application"""
11
 
12
  def __init__(self):
13
  self._chat_repository: Optional[ChatRepository] = None
 
7
  from app.config.db import db_config
8
 
9
  class InitialSetup:
10
+ """Initial setup manager for the application when database type is embedded"""
11
 
12
  def __init__(self):
13
  self._chat_repository: Optional[ChatRepository] = None
app/schema/chat_schema.py CHANGED
@@ -1,4 +1,4 @@
1
- from typing import List
2
  from pydantic import BaseModel, Field
3
  from enum import Enum
4
 
@@ -24,25 +24,18 @@ class MessageRequest(BaseModel):
24
  content: str = Field(..., description="The content of the message")
25
 
26
 
27
- class ChatCompletionBase(BaseModel):
 
28
  """
29
- Represents a chat completion.
30
  """
31
-
32
- completion_id: str = Field(
33
  None,
34
  description="The unique identifier for the chat completion. When starting a new chat, this will be a new UUID. When continuing a previous chat, this will be the same as the previous chat completion id.",
35
  )
36
-
37
-
38
- class ChatCompletionRequest(ChatCompletionBase):
39
- """
40
- Represents a chat completion request. Starting a new chat or continuing a previous chat.
41
- """
42
-
43
- model: str = Field(None, description="The model to use for the chat completion")
44
- messages: List[MessageRequest] = Field(..., description="The messages to use for the chat completion")
45
- stream: bool = Field(..., description="Whether to stream the chat completion")
46
 
47
 
48
  class MessageResponse(BaseModel):
@@ -50,34 +43,31 @@ class MessageResponse(BaseModel):
50
  A chat completion message generated by the model.
51
  """
52
 
53
- message_id: str = Field(..., description="The unique identifier for the message")
54
- role: MessageSchemaRoleType = Field(..., description="The role of the message")
55
- content: str = Field(..., description="The content of the message")
56
- figure: dict = Field(..., description="The figure data to be visualized")
57
 
58
 
59
  class ChoiceResponse(BaseModel):
60
- finish_reason: ChoiceSchemaFinishReasonType = Field(
61
- ...,
62
  description="The reason the model stopped generating tokens. This will be `stop` if the model hit a natural stop point or a provided stop sequence, `length` if the maximum number of tokens specified in the request was reached, `content_filter` if content was omitted due to a flag from our content filters",
63
  )
64
- index: int = Field(..., description="The index of the choice in the list of choices.")
65
- message: MessageResponse = Field(..., description="The message to use for the chat completion")
66
  # logprobs: str = None # not implemented yet
67
 
68
 
69
- class ChatCompletionResponse(ChatCompletionBase):
70
  """
71
  Represents a chat completion response returned by model, based on the provided input.
72
  """
73
 
74
- completion_id: str = Field(..., description="The unique identifier for the chat completion")
75
- choices: List[ChoiceResponse] = Field(..., description="A list of chat completion choices.")
76
- created: int = Field(
77
- ...,
78
- description="The Unix timestamp (in seconds) of when the chat completion was created.",
79
- )
80
- model: str = Field(..., description="The model used for the chat completion")
81
  # not implemented yet
82
  # service_tier: Optional[ServiceTier] = ServiceTier.AUTO
83
  # system_fingerprint: Optional[StrictStr] = Field(default=None, description="This fingerprint represents the backend configuration that the model runs with. Can be used in conjunction with the `seed` request parameter to understand when backend changes have been made that might impact determinism. ")
 
1
+ from typing import List, Optional
2
  from pydantic import BaseModel, Field
3
  from enum import Enum
4
 
 
24
  content: str = Field(..., description="The content of the message")
25
 
26
 
27
+
28
+ class ChatCompletionRequest(BaseModel):
29
  """
30
+ Represents a chat completion request. Starting a new chat or continuing a previous chat.
31
  """
32
+ completion_id: Optional[str] = Field(
 
33
  None,
34
  description="The unique identifier for the chat completion. When starting a new chat, this will be a new UUID. When continuing a previous chat, this will be the same as the previous chat completion id.",
35
  )
36
+ model: Optional[str] = Field(None, description="The model to use for the chat completion")
37
+ messages: Optional[List[MessageRequest]] = Field(None, description="The messages to use for the chat completion")
38
+ stream: Optional[bool] = Field(None, description="Whether to stream the chat completion")
 
 
 
 
 
 
 
39
 
40
 
41
  class MessageResponse(BaseModel):
 
43
  A chat completion message generated by the model.
44
  """
45
 
46
+ message_id: Optional[str] = Field(None, description="The unique identifier for the message")
47
+ role: Optional[MessageSchemaRoleType] = Field(None, description="The role of the message")
48
+ content: Optional[str] = Field(None, description="The content of the message")
49
+ figure: Optional[dict] = Field(None, description="The figure data to be visualized")
50
 
51
 
52
  class ChoiceResponse(BaseModel):
53
+ finish_reason: Optional[ChoiceSchemaFinishReasonType] = Field(
54
+ None,
55
  description="The reason the model stopped generating tokens. This will be `stop` if the model hit a natural stop point or a provided stop sequence, `length` if the maximum number of tokens specified in the request was reached, `content_filter` if content was omitted due to a flag from our content filters",
56
  )
57
+ index: Optional[int] = Field(None, description="The index of the choice in the list of choices.")
58
+ message: Optional[MessageResponse] = Field(None, description="The message to use for the chat completion")
59
  # logprobs: str = None # not implemented yet
60
 
61
 
62
+ class ChatCompletionResponse(BaseModel):
63
  """
64
  Represents a chat completion response returned by model, based on the provided input.
65
  """
66
 
67
+ completion_id: Optional[str] = Field(None, description="The unique identifier for the chat completion")
68
+ choices: Optional[List[ChoiceResponse]] = Field(None, description="A list of chat completion choices.")
69
+ created: Optional[int] = Field(None, description="The Unix timestamp (in seconds) of when the chat completion was created.")
70
+ model: Optional[str] = Field(None, description="The model used for the chat completion")
 
 
 
71
  # not implemented yet
72
  # service_tier: Optional[ServiceTier] = ServiceTier.AUTO
73
  # system_fingerprint: Optional[StrictStr] = Field(default=None, description="This fingerprint represents the backend configuration that the model runs with. Can be used in conjunction with the `seed` request parameter to understand when backend changes have been made that might impact determinism. ")
app/service/chat_service.py CHANGED
@@ -9,7 +9,7 @@ from app.schema.chat_schema import (
9
  )
10
  from app.model.chat_model import ChatCompletion, ChatMessage
11
  import uuid
12
-
13
  from app.schema.conversation import ConversationResponse
14
 
15
 
@@ -31,7 +31,7 @@ class ChatService:
31
  entity.last_updated_by = username
32
  entity.last_updated_date = datetime.datetime.now()
33
 
34
- entity = self.chat_repository.save(entity)
35
 
36
  result = ChatCompletionResponse(**entity.model_dump())
37
  messages = [MessageResponse(**{"role": "assistant", "content": response_content})] # TODO: implement ai-agent response
@@ -46,21 +46,16 @@ class ChatService:
46
  ]
47
  return result
48
 
49
- async def find(
50
- self,
51
- query: dict,
52
- page: int,
53
- limit: int,
54
- sort: dict,
55
- project: dict = None,
56
- ) -> List[ChatCompletion]:
57
- return self.chat_repository.find(query, page, limit, sort, project)
58
 
59
  async def find_by_id(self, completion_id: str, project: dict = None) -> ChatCompletion:
60
- return self.chat_repository.find_by_id(completion_id, project)
61
 
62
  async def find_messages(self, completion_id: str) -> List[ChatMessage]:
63
- return self.chat_repository.find_messages(completion_id)
64
 
65
  # conversation service
66
  async def find_all_conversations(self, username: str) -> List[ConversationResponse]:
 
9
  )
10
  from app.model.chat_model import ChatCompletion, ChatMessage
11
  import uuid
12
+ from loguru import logger
13
  from app.schema.conversation import ConversationResponse
14
 
15
 
 
31
  entity.last_updated_by = username
32
  entity.last_updated_date = datetime.datetime.now()
33
 
34
+ entity = await self.chat_repository.save(entity)
35
 
36
  result = ChatCompletionResponse(**entity.model_dump())
37
  messages = [MessageResponse(**{"role": "assistant", "content": response_content})] # TODO: implement ai-agent response
 
46
  ]
47
  return result
48
 
49
+ async def find(self, query: dict, page: int, limit: int, sort: dict, project: dict = None) -> List[ChatCompletionResponse]:
50
+ logger.debug(f"BEGIN SERVICE: find for query: {query}, page: {page}, limit: {limit}, sort: {sort}, project: {project}")
51
+ entities = await self.chat_repository.find(query, page, limit, sort, project)
52
+ return [ChatCompletionResponse(**entity.model_dump()) for entity in entities]
 
 
 
 
 
53
 
54
  async def find_by_id(self, completion_id: str, project: dict = None) -> ChatCompletion:
55
+ return await self.chat_repository.find_by_id(completion_id, project)
56
 
57
  async def find_messages(self, completion_id: str) -> List[ChatMessage]:
58
+ return await self.chat_repository.find_messages(completion_id)
59
 
60
  # conversation service
61
  async def find_all_conversations(self, username: str) -> List[ConversationResponse]: