ramanjitsingh1368 committed on
Commit
eb474ee
·
1 Parent(s): 8eb6511

code refactor

Browse files
src/app.py CHANGED
@@ -4,7 +4,7 @@ from contextlib import asynccontextmanager
4
  from fastapi import FastAPI
5
  from fastapi.middleware.cors import CORSMiddleware
6
 
7
- from src.controllers import api_router, websocket_router
8
  from src.config import logger, DatabaseConfig
9
  from src.middlewares import AuthenticationMiddleware
10
 
@@ -45,5 +45,4 @@ async def check_health():
45
  return {"response": "Service is healthy!"}
46
 
47
 
48
- app.include_router(api_router, prefix="/api")
49
- app.include_router(websocket_router, prefix="/ws")
 
4
  from fastapi import FastAPI
5
  from fastapi.middleware.cors import CORSMiddleware
6
 
7
+ from src.controllers import api_router
8
  from src.config import logger, DatabaseConfig
9
  from src.middlewares import AuthenticationMiddleware
10
 
 
45
  return {"response": "Service is healthy!"}
46
 
47
 
48
+ app.include_router(api_router, prefix="/api")
 
src/config/_database.py CHANGED
@@ -2,7 +2,7 @@ import os
2
  from beanie import init_beanie
3
  from motor.motor_asyncio import AsyncIOMotorClient
4
 
5
- from src.models import User, Session
6
 
7
 
8
  class DatabaseConfig:
@@ -10,9 +10,19 @@ class DatabaseConfig:
10
 
11
  self.MONGO_URI = os.getenv("MONGO_DB_URI")
12
  self.MONGO_DB_NAME = os.getenv("MONGO_DB_NAME")
13
- self.client = AsyncIOMotorClient(self.MONGO_URI)
14
  self.db = self.client[self.MONGO_DB_NAME]
15
 
16
  async def init_beanie(self):
17
 
18
- await init_beanie(database=self.db, document_models=[User, Session])
 
 
 
 
 
 
 
 
 
 
 
2
  from beanie import init_beanie
3
  from motor.motor_asyncio import AsyncIOMotorClient
4
 
5
+ from src.models import User, Session, File, VectorStoreRecordId, Conversation, Message
6
 
7
 
8
  class DatabaseConfig:
 
10
 
11
  self.MONGO_URI = os.getenv("MONGO_DB_URI")
12
  self.MONGO_DB_NAME = os.getenv("MONGO_DB_NAME")
13
+ self.client = AsyncIOMotorClient(self.MONGO_URI, uuidRepresentation="standard")
14
  self.db = self.client[self.MONGO_DB_NAME]
15
 
16
  async def init_beanie(self):
17
 
18
+ await init_beanie(
19
+ database=self.db,
20
+ document_models=[
21
+ User,
22
+ Session,
23
+ Conversation,
24
+ Message,
25
+ File,
26
+ VectorStoreRecordId,
27
+ ],
28
+ )
src/controllers/__init__.py CHANGED
@@ -1,28 +1,23 @@
1
  from fastapi import APIRouter, Depends
2
  from fastapi.security import APIKeyHeader
 
3
  from ._auth_controller import AuthController
4
- from ._ai_voice_controller import AIVoiceController
5
- from ._ai_text_controller import AITextController
6
- from ._pinecone_controller import PineconeController
7
 
8
  api_router = APIRouter()
9
- websocket_router = APIRouter()
10
 
11
- _auth_controller = AuthController()
12
- _ai_voice_controller = AIVoiceController()
13
- _ai_text_controller = AITextController()
14
- _pinecone_controller = PineconeController()
15
 
16
  API_KEY_HEADER = APIKeyHeader(name="Authorization", auto_error=False)
17
 
18
- websocket_router.include_router(_ai_voice_controller.websocket_router, prefix="/v1")
19
- websocket_router.include_router(_ai_text_controller.websocket_router, prefix="/v1")
20
-
21
  api_router.include_router(_auth_controller.api_router, prefix="/v1", dependencies=[Depends(API_KEY_HEADER)])
22
- api_router.include_router(_ai_voice_controller.api_router, prefix="/v1")
23
- api_router.include_router(_ai_text_controller.api_router, prefix="/v1")
24
- api_router.include_router(_pinecone_controller.api_router, prefix="/v1")
25
 
26
- __all__ = ["api_router", "websocket_router"]
27
  __version__ = "0.1.0"
28
  __author__ = "Ramanjit Singh"
 
1
  from fastapi import APIRouter, Depends
2
  from fastapi.security import APIKeyHeader
3
+
4
  from ._auth_controller import AuthController
5
+ from ._file_controller import FileController
6
+ from ._conversation_controller import ConversationController
 
7
 
8
# Top-level API router: aggregates all versioned controller routers.
api_router = APIRouter()

# Controllers are instantiated once at import time; only their routers are kept.
auth_router = AuthController().api_router
conversation_router = ConversationController().api_router
file_router = FileController().api_router

# Reads the raw "Authorization" header; auto_error=False lets the downstream
# auth middleware decide how to reject missing credentials.
API_KEY_HEADER = APIKeyHeader(name="Authorization", auto_error=False)

# BUG FIX: the previous revision left a stale
# `api_router.include_router(_auth_controller.api_router, ...)` line behind
# after `_auth_controller` was removed, which raises NameError at import time.
# The API-key dependency it carried is preserved on the auth include below.
api_router.include_router(
    auth_router,
    prefix="/v1",
    tags=["Authentication"],
    dependencies=[Depends(API_KEY_HEADER)],
)
api_router.include_router(conversation_router, prefix="/v1", tags=["Conversation"])
api_router.include_router(file_router, prefix="/v1", tags=["Semantic Search"])

__all__ = ["api_router"]
__version__ = "0.1.0"
__author__ = "Ramanjit Singh"
src/controllers/_ai_text_controller.py DELETED
@@ -1,31 +0,0 @@
1
- from fastapi import APIRouter, WebSocket, HTTPException
2
-
3
- from src.config import logger
4
- from src.services import AITextService
5
-
6
-
7
class AITextController:
    """HTTP and WebSocket endpoints for the realtime text AI service."""

    def __init__(self):
        # Stores the service *class*; a fresh instance is entered per request.
        self.service = AITextService
        self.api_router = APIRouter()
        self.websocket_router = APIRouter()
        self.api_router.add_api_route(
            "/ai/text/session", self.get_session, methods=["GET"]
        )
        self.websocket_router.add_websocket_route("/ai/text", self.websocket)

    async def get_session(self):
        """Create and return a new OpenAI realtime session."""
        try:
            async with self.service() as svc:
                session = await svc.create_openai_session()
            return session
        except Exception as exc:
            logger.error(f"Error while building session: {str(exc)}")
            raise HTTPException(status_code=500, detail=str(exc))

    async def websocket(self, websocket: WebSocket):
        """Proxy a client WebSocket through the AI text service.

        NOTE(review): raising HTTPException from a WebSocket handler has no
        HTTP response to attach to — confirm how failures should surface here.
        """
        try:
            async with self.service() as svc:
                return await svc.openai_websocket_connection(websocket)
        except Exception as exc:
            logger.error(f"Error during WebSocket connection: {str(exc)}")
            raise HTTPException(status_code=500, detail=str(exc))
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
src/controllers/_conversation_controller.py ADDED
@@ -0,0 +1,74 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from fastapi import (
2
+ APIRouter,
3
+ HTTPException,
4
+ WebSocket,
5
+ Query,
6
+ Body,
7
+ Path,
8
+ WebSocketDisconnect,
9
+ )
10
+ from pydantic import BaseModel
11
+ from src.config import logger
12
+ from src.services import ConversationService
13
+
14
+
15
class OfferModel(BaseModel):
    """WebRTC SDP offer payload sent by the client."""

    # Raw Session Description Protocol blob.
    sdp: str
    # SDP message type (presumably "offer" here — verify against the client).
    type: str
18
+
19
+
20
class ConversationController:
    """REST + WebSocket endpoints for creating and running conversations."""

    def __init__(self):
        # Service *class*; each request opens a fresh async context around it.
        self.service = ConversationService
        self.api_router = APIRouter()
        self.api_router.add_api_route(
            "/conversations", self.create_conversation, methods=["POST"]
        )
        self.api_router.add_api_route(
            "/conversations/{conversation_id}/webrtc",
            self.create_webrtc_connection,
            methods=["POST"],
        )
        self.api_router.add_websocket_route("/conversations", self.conversation)

    async def create_conversation(self, modality: str = Query(...)):
        """Create a conversation record plus an OpenAI session for *modality*."""
        try:
            logger.info(f"Creating conversation with modality: {modality}")
            # TODO(review): user id is hard-coded — replace with the
            # authenticated user once auth context reaches this endpoint.
            user_id = "76ebd74f-7395-455b-bb7a-9e890a51e7a4"
            async with self.service() as service:
                return await service.create_conversation(
                    user_id=user_id, modality=modality
                )
        except Exception as e:
            logger.error(f"Error creating conversation: {str(e)}")
            raise HTTPException(status_code=500, detail="Failed to create conversation")

    async def create_webrtc_connection(
        self,
        conversation_id: str = Path(...),
        ephemeral_key: str = Body(...),
        offer: OfferModel = Body(...),
    ):
        """Negotiate a WebRTC connection for an existing conversation."""
        try:
            async with self.service() as service:
                return await service.create_webrtc_connection(
                    conversation_id=conversation_id,
                    ephemeral_key=ephemeral_key,
                    offer=offer.model_dump(),
                )
        except Exception as e:
            logger.error(f"Error creating WebRTC connection: {str(e)}")
            raise HTTPException(
                status_code=500, detail="Failed to create WebRTC connection"
            )

    async def conversation(self, websocket: WebSocket):
        """WebSocket endpoint that delegates the session to ConversationService."""
        try:
            async with self.service() as service:
                await service.conversation(websocket=websocket)
        except WebSocketDisconnect:
            # BUG FIX: was logger.info(f"WebSocket disconnected") — an f-string
            # with no placeholders.
            logger.info("WebSocket disconnected")
        except Exception as e:
            logger.error(f"Error in WebSocket conversation: {str(e)}")
            # Best-effort error report: the socket may already be closed, in
            # which case send/close raise RuntimeError — don't mask the log.
            try:
                await websocket.send_json({"error": "Unexpected error occurred"})
                await websocket.close()
            except RuntimeError:
                pass
src/controllers/{_ai_voice_controller.py → _file_controller.py} RENAMED
@@ -1,31 +1,31 @@
1
- from fastapi import APIRouter, WebSocket, HTTPException
2
 
3
  from src.config import logger
4
- from src.services import AIVoiceService
5
 
6
 
7
- class AIVoiceController:
8
  def __init__(self):
9
- self.service = AIVoiceService
10
  self.api_router = APIRouter()
11
- self.websocket_router = APIRouter()
12
  self.api_router.add_api_route(
13
- "/ai/voice/session", self.get_session, methods=["GET"]
14
  )
15
- self.websocket_router.add_websocket_route("/ai/voice", self.websocket)
16
 
17
- async def get_session(self):
18
  try:
 
19
  async with self.service() as service:
20
- return await service.create_openai_session()
21
  except Exception as e:
22
- logger.error(f"Error while building session: {str(e)}")
23
  raise HTTPException(status_code=500, detail=str(e))
24
 
25
- async def websocket(self, websocket: WebSocket):
26
  try:
27
  async with self.service() as service:
28
- return await service.openai_websocket_connection(websocket)
29
  except Exception as e:
30
- logger.error(f"Error during WebSocket connection: {str(e)}")
31
  raise HTTPException(status_code=500, detail=str(e))
 
1
+ from fastapi import APIRouter, HTTPException, UploadFile, File
2
 
3
  from src.config import logger
4
+ from src.services import FileService
5
 
6
 
7
class FileController:
    """Routes for file ingestion and semantic search over ingested files."""

    def __init__(self):
        # Service *class*; a fresh instance is entered per request.
        self.service = FileService
        self.api_router = APIRouter()
        router = self.api_router
        router.add_api_route("/files", self.insert_file, methods=["POST"])
        router.add_api_route("/files/search", self.semantic_search, methods=["GET"])

    async def insert_file(self, file: UploadFile = File(...)):
        """Ingest an uploaded file for the (currently hard-coded) user."""
        try:
            user_id = "76ebd74f-7395-455b-bb7a-9e890a51e7a4"
            async with self.service() as svc:
                return await svc.insert_file(file=file, user_id=user_id)
        except Exception as exc:
            logger.error(f"Error while inserting file: {str(exc)}")
            raise HTTPException(status_code=500, detail=str(exc))

    async def semantic_search(self, query: str):
        """Run a semantic search over previously ingested files."""
        try:
            async with self.service() as svc:
                return await svc.semantic_search(query=query)
        except Exception as exc:
            logger.error(f"Error while searching for similar data: {str(exc)}")
            raise HTTPException(status_code=500, detail=str(exc))
src/controllers/_pinecone_controller.py DELETED
@@ -1,43 +0,0 @@
1
- from fastapi import APIRouter, HTTPException, UploadFile, Form, File
2
-
3
- from src.config import logger
4
- from src.services import PineconeService
5
-
6
-
7
class PineconeController:
    """REST endpoints wrapping PineconeService: insert, search, delete."""

    def __init__(self):
        # Service *class* (not an instance); one is entered per request.
        self.pinecone_service = PineconeService
        self.api_router = APIRouter()
        self.api_router.add_api_route(
            "/pinecone/insert", self.insert_data, methods=["POST"]
        )
        self.api_router.add_api_route(
            "/pinecone/search", self.similarity_search, methods=["POST"]
        )
        self.api_router.add_api_route(
            "/pinecone/delete", self.delete, methods=["DELETE"]
        )

    async def insert_data(self, file: UploadFile):
        """Insert the uploaded file's contents into the vector index."""
        try:
            async with self.pinecone_service() as service:
                return await service.insert_data(file=file)
        except Exception as e:
            logger.error(f"Error while inserting data: {str(e)}")
            raise HTTPException(status_code=500, detail=str(e))

    async def similarity_search(self, query: str):
        """Return index entries most similar to *query*."""
        try:
            async with self.pinecone_service() as service:
                return await service.similarity_search(query=query)
        except Exception as e:
            logger.error(f"Error while searching for similar data: {str(e)}")
            raise HTTPException(status_code=500, detail=str(e))

    async def delete(self, file_name: str, file_id: str):
        """Delete vector records belonging to a file.

        NOTE(review): the service-side delete looks like a stub — confirm it
        actually removes data before relying on this endpoint.
        """
        try:
            async with self.pinecone_service() as service:
                return await service.delete(file_name=file_name, file_id=file_id)
        except Exception as e:
            logger.error(f"Error while deleting data: {str(e)}")
            raise HTTPException(status_code=500, detail=str(e))
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
src/models/__init__.py CHANGED
@@ -1,9 +1,17 @@
1
  from ._users import User
2
  from ._sessions import Session
 
 
 
 
3
 
4
  __all__ = [
5
  "User",
6
  "Session",
 
 
 
 
7
  ]
8
  __version__ = "0.1.0"
9
  __author__ = "Narinder Singh"
 
1
  from ._users import User
2
  from ._sessions import Session
3
+ from ._conversations import Conversation
4
+ from ._messages import Message
5
+ from ._files import File
6
+ from ._vector_store_record_ids import VectorStoreRecordId
7
 
8
  __all__ = [
9
  "User",
10
  "Session",
11
+ "Conversation",
12
+ "Message",
13
+ "File",
14
+ "VectorStoreRecordId",
15
  ]
16
  __version__ = "0.1.0"
17
  __author__ = "Narinder Singh"
src/models/_conversations.py ADDED
@@ -0,0 +1,18 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from datetime import datetime
2
+ from typing import Optional
3
+ from beanie import Document, Link
4
+ from uuid import UUID, uuid4
5
+ from pydantic import Field
6
+
7
+ from ._users import User
8
+
9
+
10
class Conversation(Document):
    """Beanie document for one chat conversation (collection ``conversations``)."""

    # Stored as Mongo's _id; generated client-side so it exists before insert.
    id: UUID = Field(default_factory=uuid4, alias="_id")
    # Reference to the owning user document.
    user_id: Link[User] = Field(...)
    # Rolling summary of the conversation; None until one is generated.
    summary: Optional[str] = None
    # NOTE(review): datetime.now is timezone-naive — confirm whether UTC-aware
    # timestamps are intended; updated_at is not refreshed automatically.
    created_at: datetime = Field(default_factory=datetime.now)
    updated_at: datetime = Field(default_factory=datetime.now)

    class Settings:
        name = "conversations"
src/models/_files.py ADDED
@@ -0,0 +1,18 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from datetime import datetime
2
+ from beanie import Document, Link
3
+ from uuid import UUID, uuid4
4
+ from pydantic import Field
5
+
6
+ from ._users import User
7
+
8
+
9
class File(Document):
    """Beanie document describing an uploaded file (collection ``files``)."""

    # Stored as Mongo's _id; generated client-side so it exists before insert.
    id: UUID = Field(default_factory=uuid4, alias="_id")
    # Reference to the owning user document.
    user_id: Link[User]
    # Original file name as uploaded.
    file_name: str = Field(...)
    # File type — presumably a MIME type or extension; verify against callers.
    file_type: str = Field(...)
    # NOTE(review): datetime.now is timezone-naive — confirm whether UTC-aware
    # timestamps are intended; updated_at is not refreshed automatically.
    created_at: datetime = Field(default_factory=datetime.now)
    updated_at: datetime = Field(default_factory=datetime.now)

    class Settings:
        name = "files"
src/models/_messages.py ADDED
@@ -0,0 +1,18 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from datetime import datetime
2
+ from beanie import Document, Link
3
+ from uuid import UUID, uuid4
4
+ from pydantic import Field
5
+
6
+ from ._conversations import Conversation
7
+
8
+
9
class Message(Document):
    """Beanie document for one chat message (collection ``messages``)."""

    # Stored as Mongo's _id; generated client-side so it exists before insert.
    id: UUID = Field(default_factory=uuid4, alias="_id")
    # Reference to the conversation this message belongs to.
    conversation_id: Link[Conversation]
    # Speaker role — callers in this commit pass "user" or "assistant".
    role: str = Field(...)
    # Message text content.
    content: str = Field(...)
    # NOTE(review): datetime.now is timezone-naive — confirm whether UTC-aware
    # timestamps are intended; updated_at is not refreshed automatically.
    created_at: datetime = Field(default_factory=datetime.now)
    updated_at: datetime = Field(default_factory=datetime.now)

    class Settings:
        name = "messages"
src/models/_users.py CHANGED
@@ -1,3 +1,4 @@
 
1
  from pydantic import Field
2
  from typing import Annotated
3
  from datetime import datetime
 
1
+ from typing import Annotated
2
  from pydantic import Field
3
  from typing import Annotated
4
  from datetime import datetime
src/models/_vector_store_record_ids.py ADDED
@@ -0,0 +1,17 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from datetime import datetime
2
+ from beanie import Document, Link
3
+ from uuid import UUID, uuid4
4
+ from pydantic import Field
5
+
6
+ from ._files import File
7
+
8
+
9
class VectorStoreRecordId(Document):
    """Maps a stored file to one of its vector-store record ids
    (collection ``vector_store_record_ids``)."""

    # Stored as Mongo's _id; generated client-side so it exists before insert.
    id: UUID = Field(default_factory=uuid4, alias="_id")
    # Reference to the File document the vector record was built from.
    file_id: Link[File]
    # Id of the record in the external vector store (presumably Pinecone —
    # verify against FileService).
    record_id: str = Field(...)
    # NOTE(review): datetime.now is timezone-naive — confirm whether UTC-aware
    # timestamps are intended; updated_at is not refreshed automatically.
    created_at: datetime = Field(default_factory=datetime.now)
    updated_at: datetime = Field(default_factory=datetime.now)

    class Settings:
        name = "vector_store_record_ids"
src/repositories/__init__.py CHANGED
@@ -1,12 +1,21 @@
1
- from ._user_repository import UserRepository
2
  from ._base_repository import BaseRepository
 
3
  from ._session_repository import SessionRepository
 
 
 
 
 
4
 
5
  __all__ = [
6
  "UserRepository",
7
  "BaseRepository",
8
  "SessionRepository",
 
 
 
 
9
  ]
10
 
11
  __version__ = "0.1.0"
12
- __author__ = "Ramanjit Singh"
 
 
1
  from ._base_repository import BaseRepository
2
+ from ._user_repository import UserRepository
3
  from ._session_repository import SessionRepository
4
+ from ._conversation_repository import ConversationRepository
5
+ from ._message_repository import MessageRepository
6
+ from ._file_repository import FileRepository
7
+ from ._vector_store_record_id_repository import VectorStoreRecordIdRepository
8
+
9
 
10
  __all__ = [
11
  "UserRepository",
12
  "BaseRepository",
13
  "SessionRepository",
14
+ "ConversationRepository",
15
+ "MessageRepository",
16
+ "FileRepository",
17
+ "VectorStoreRecordIdRepository",
18
  ]
19
 
20
  __version__ = "0.1.0"
21
+ __author__ = "Ramanjit Singh"
src/repositories/_conversation_repository.py ADDED
@@ -0,0 +1,8 @@
 
 
 
 
 
 
 
 
 
1
+ from src.models import Conversation
2
+
3
+ from ._base_repository import BaseRepository
4
+
5
+
6
class ConversationRepository(BaseRepository):
    """Repository for Conversation documents; CRUD comes from BaseRepository."""

    def __init__(self):
        super().__init__(model=Conversation)
src/repositories/_file_repository.py ADDED
@@ -0,0 +1,8 @@
 
 
 
 
 
 
 
 
 
1
+ from src.models import File
2
+
3
+ from ._base_repository import BaseRepository
4
+
5
+
6
class FileRepository(BaseRepository):
    """Repository for File documents; CRUD comes from BaseRepository."""

    def __init__(self):
        super().__init__(model=File)
src/repositories/_message_repository.py ADDED
@@ -0,0 +1,8 @@
 
 
 
 
 
 
 
 
 
1
+ from src.models import Message
2
+
3
+ from ._base_repository import BaseRepository
4
+
5
+
6
class MessageRepository(BaseRepository):
    """Repository for Message documents; CRUD comes from BaseRepository."""

    def __init__(self):
        super().__init__(model=Message)
src/repositories/_vector_store_record_id_repository.py ADDED
@@ -0,0 +1,8 @@
 
 
 
 
 
 
 
 
 
1
+ from src.models import VectorStoreRecordId
2
+
3
+ from ._base_repository import BaseRepository
4
+
5
+
6
class VectorStoreRecordIdRepository(BaseRepository):
    """Repository for VectorStoreRecordId documents; CRUD from BaseRepository."""

    def __init__(self):
        super().__init__(model=VectorStoreRecordId)
src/services/__init__.py CHANGED
@@ -1,17 +1,16 @@
1
  from ._auth_service import AuthService
2
  from ._user_service import UserService
3
- from ._ai_text_service import AITextService
4
  from ._session_service import SessionService
5
- from ._ai_voice_service import AIVoiceService
6
- from ._pinecone_service import PineconeService
7
 
8
  __all__ = [
 
9
  "AuthService",
10
  "UserService",
11
- "AITextService",
12
  "SessionService",
13
- "AIVoiceService",
14
- "PineconeService",
15
  ]
16
 
17
  __version__ = "0.1.0"
 
1
  from ._auth_service import AuthService
2
  from ._user_service import UserService
3
+ from ._file_service import FileService
4
  from ._session_service import SessionService
5
+ from ._conversation_service import ConversationService
6
+
7
 
8
  __all__ = [
9
+ "FileService",
10
  "AuthService",
11
  "UserService",
 
12
  "SessionService",
13
+ "ConversationService",
 
14
  ]
15
 
16
  __version__ = "0.1.0"
src/services/_ai_voice_service.py DELETED
@@ -1,100 +0,0 @@
1
- import json
2
-
3
- from ._pinecone_service import PineconeService
4
- from src.config import logger
5
- from src.utils import OpenAIClient
6
-
7
-
8
class AIVoiceService:
    """Bridges a client WebSocket to OpenAI realtime voice events.

    Relays server events (transcriptions, function calls) coming back from the
    OpenAI realtime API, resolving tool calls through PineconeService.
    """

    def __init__(self):
        # Client/service *classes*; fresh instances are entered per operation.
        self.openai_client = OpenAIClient
        self.pinecone_service = PineconeService
        # Tool-name -> (service class, method name) dispatch table for
        # function calls requested by the model.
        self.functions_dictionary = {
            "get_relevant_information": {
                "service": self.pinecone_service,
                "function": "similarity_search",
            },
        }

    async def __aenter__(self):
        return self

    async def __aexit__(self, *args):
        # No resources to release; context-manager form kept for symmetry
        # with the other services.
        pass

    async def handle_user_transcript(self, message):
        """Log the user's transcribed speech."""
        logger.info(f"User transcript: {message['transcript']}")

    async def handle_ai_transcript(self, message):
        """Log the assistant transcript of a completed message response."""
        if (
            message["response"]["status"] == "completed"
            and message["response"]["output"][0]["type"] == "message"
        ):
            # BUG FIX: the original nested double quotes inside a
            # double-quoted f-string — a SyntaxError on Python < 3.12.
            transcript = message["response"]["output"][0]["content"][0]["transcript"]
            logger.info(f"Assistant transcript: {transcript}")

    async def handle_ai_function_call(self, data):
        """Run the tool call requested by the model and build a context prompt.

        Raises AttributeError when the tool or its method is unknown.
        """
        tool_name = data["name"]
        arguments = json.loads(data["arguments"])
        logger.info(f"Function call: {tool_name} \nArguments: {arguments}")

        function_info = self.functions_dictionary.get(tool_name)
        if not function_info:
            raise AttributeError(f"Function {tool_name} not found in dictionary")

        service_class = function_info["service"]
        async with service_class() as service:
            func = getattr(service, function_info["function"], None)
            if not func:
                raise AttributeError(
                    f"No such function {function_info['function']} in service"
                )
            response = await func(arguments["query"])
            response = f"Context : {response}\n\n# Use this context to respond to the user query : {arguments['query']}"
            return response

    async def create_openai_session(self):
        """Create a new OpenAI realtime session via the shared client."""
        async with self.openai_client() as client:
            return await client.create_openai_session()

    async def openai_websocket_connection(self, websocket):
        """Accept the client socket and relay OpenAI realtime server events."""
        openai_server_events = {
            "function_call": "response.function_call_arguments.done",
            "input_transcription": "conversation.item.input_audio_transcription.completed",
            "output_transcription": "response.done",
        }
        await websocket.accept()

        while True:
            data = await websocket.receive_text()
            message = json.loads(data)

            if message["type"] == openai_server_events["function_call"]:
                # Inject the tool result as a user message, then ask the
                # model to produce a fresh response.
                response = await self.handle_ai_function_call(message)
                event_response = {
                    "type": "conversation.item.create",
                    "previous_item_id": None,
                    "item": {
                        "type": "message",
                        "role": "user",
                        "content": [
                            {
                                "type": "input_text",
                                "text": response,
                            }
                        ],
                    },
                }
                await websocket.send_text(json.dumps(event_response))
                event_response_2 = {"type": "response.create"}
                await websocket.send_text(json.dumps(event_response_2))

            elif message["type"] == openai_server_events["input_transcription"]:
                await self.handle_user_transcript(message)

            elif message["type"] == openai_server_events["output_transcription"]:
                await self.handle_ai_transcript(message)

            elif message["type"] == "error":
                logger.error(f"Error: {message['error']}")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
src/services/{_ai_text_service.py → _conversation_service.py} RENAMED
@@ -1,18 +1,25 @@
1
  import json
 
 
 
2
 
3
- from ._pinecone_service import PineconeService
4
- from src.config import logger
5
  from src.utils import OpenAIClient
 
 
 
6
 
7
 
8
- class AITextService:
9
  def __init__(self):
10
  self.openai_client = OpenAIClient
11
- self.pinecone_service = PineconeService
 
 
12
  self.functions_dictionary = {
13
  "get_relevant_information": {
14
- "service": self.pinecone_service,
15
- "function": "similarity_search",
16
  },
17
  }
18
 
@@ -22,47 +29,63 @@ class AITextService:
22
  async def __aexit__(self, *args):
23
  pass
24
 
25
- async def handle_user_message(self, user_message):
26
- logger.info(f"User message: {user_message}")
 
 
27
 
28
- async def handle_ai_response(self, ai_response):
29
- logger.info(f"AI response: {ai_response}")
 
 
 
30
 
31
- async def handle_ai_function_call(self, data):
32
- tool_name = data["name"]
33
- arguments = json.loads(data["arguments"])
34
- logger.info(f"Function call: {tool_name} \nArguments: {arguments}")
35
 
36
- function_info = self.functions_dictionary.get(tool_name)
37
- if not function_info:
38
- raise AttributeError(f"Function {tool_name} not found in dictionary")
 
 
39
 
40
- service_class = function_info["service"]
41
- async with service_class() as service:
42
- func = getattr(service, function_info["function"], None)
43
- if not func:
44
- raise AttributeError(
45
- f"No such function {function_info['function']} in service"
46
- )
47
- response = await func(arguments["query"])
48
- response = f"Context : {response}\n\n# Use this context to respond to the user query : {arguments['query']}"
49
- return response
50
 
51
- async def create_openai_session(self):
52
- async with self.openai_client() as client:
53
- return await client.create_openai_session(text_mode_only=True)
54
 
55
- async def openai_websocket_connection(self, websocket):
56
- await websocket.accept()
 
 
 
57
 
 
 
 
 
 
 
 
 
 
 
 
 
58
  while True:
59
  data = await websocket.receive_text()
60
  message = json.loads(data)
61
 
62
  openai_server_events = {
63
  "session_created": message["type"] == "session.created",
64
- "function_call": message["type"]
65
- == "response.function_call_arguments.done",
 
66
  "ai_response": (
67
  message["type"] == "response.done"
68
  and message["response"]["status"] == "completed"
@@ -73,7 +96,9 @@ class AITextService:
73
 
74
  if openai_server_events["session_created"]:
75
  user_query = input("Enter your query: ")
76
- await self.handle_user_message(user_message=user_query)
 
 
77
 
78
  event_stage_1 = {
79
  "type": "conversation.item.create",
@@ -115,10 +140,14 @@ class AITextService:
115
 
116
  if openai_server_events["ai_response"]:
117
  ai_response = message["response"]["output"][0]["content"][0]["text"]
118
- await self.handle_ai_response(ai_response=ai_response)
 
 
119
 
120
  user_query = input("Enter your query: ")
121
- await self.handle_user_message(user_message=user_query)
 
 
122
 
123
  event_stage_1 = {
124
  "type": "conversation.item.create",
@@ -140,3 +169,103 @@ class AITextService:
140
 
141
  elif message["type"] == "error":
142
  logger.error(f"Error: {message['error']}")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
  import json
2
+ from fastapi import WebSocket
3
+ from uuid import UUID
4
+ from bson import ObjectId
5
 
6
+ from ._file_service import FileService
 
7
  from src.utils import OpenAIClient
8
+ from src.repositories import ConversationRepository, MessageRepository
9
+ from src.models import Conversation, Message
10
+ from src.config import logger
11
 
12
 
13
+ class ConversationService:
14
  def __init__(self):
15
  self.openai_client = OpenAIClient
16
+ self.conversation_repository = ConversationRepository()
17
+ self.message_repository = MessageRepository()
18
+ self.file_service = FileService
19
  self.functions_dictionary = {
20
  "get_relevant_information": {
21
+ "service": self.file_service,
22
+ "function": "semantic_search",
23
  },
24
  }
25
 
 
29
  async def __aexit__(self, *args):
30
  pass
31
 
32
+ async def create_conversation(self, user_id, modality):
33
+ conversation = await self.conversation_repository.insert_one(
34
+ Conversation(user_id=user_id)
35
+ )
36
 
37
+ text_mode_only = True if modality == "text" else False
38
+ async with self.openai_client() as client:
39
+ session_data = await client.create_openai_session(
40
+ text_mode_only=text_mode_only
41
+ )
42
 
43
+ return {
44
+ "conversation_id": str(conversation.id),
45
+ "session_data": session_data,
46
+ }
47
 
48
+ async def create_webrtc_connection(self, conversation_id, ephemeral_key, offer):
49
+ async with self.openai_client() as client:
50
+ return await client.create_webrtc_connection(
51
+ ephemeral_key=ephemeral_key, offer=offer
52
+ )
53
 
54
+ async def conversation(self, websocket: WebSocket):
55
+ try:
56
+ query_params = websocket.query_params
57
+ conversation_id = query_params.get("conversation_id")
58
+ modality = query_params.get("modality")
 
 
 
 
 
59
 
60
+ await websocket.accept()
 
 
61
 
62
+ if not conversation_id or not modality:
63
+ await websocket.close(
64
+ code=1008, reason="Missing or invalid query parameters"
65
+ )
66
+ return
67
 
68
+ if modality == "text":
69
+ await self.handle_text_conversion(websocket, conversation_id)
70
+ elif modality == "voice":
71
+ await self.handle_voice_conversion(websocket, conversation_id)
72
+ else:
73
+ await websocket.close(code=1008, reason="Unsupported modality")
74
+ return
75
+ finally:
76
+ await self.handle_conversation_summary(conversation_id)
77
+ # await websocket.close()
78
+
79
+ async def handle_text_conversion(self, websocket, conversation_id):
80
  while True:
81
  data = await websocket.receive_text()
82
  message = json.loads(data)
83
 
84
  openai_server_events = {
85
  "session_created": message["type"] == "session.created",
86
+ "function_call": (
87
+ message["type"] == "response.function_call_arguments.done"
88
+ ),
89
  "ai_response": (
90
  message["type"] == "response.done"
91
  and message["response"]["status"] == "completed"
 
96
 
97
  if openai_server_events["session_created"]:
98
  user_query = input("Enter your query: ")
99
+ await self.handle_user_message(
100
+ message_content=user_query, conversation_id=conversation_id
101
+ )
102
 
103
  event_stage_1 = {
104
  "type": "conversation.item.create",
 
140
 
141
  if openai_server_events["ai_response"]:
142
  ai_response = message["response"]["output"][0]["content"][0]["text"]
143
+ await self.handle_ai_message(
144
+ message_content=ai_response, conversation_id=conversation_id
145
+ )
146
 
147
  user_query = input("Enter your query: ")
148
+ await self.handle_user_message(
149
+ message_content=user_query, conversation_id=conversation_id
150
+ )
151
 
152
  event_stage_1 = {
153
  "type": "conversation.item.create",
 
169
 
170
  elif message["type"] == "error":
171
  logger.error(f"Error: {message['error']}")
172
+
173
+ async def handle_voice_conversion(self, websocket, conversation_id):
174
+ while True:
175
+ data = await websocket.receive_text()
176
+ message = json.loads(data)
177
+
178
+ openai_server_events = {
179
+ "function_call": (
180
+ message["type"] == "response.function_call_arguments.done"
181
+ ),
182
+ "input_transcription": (
183
+ message["type"]
184
+ == "conversation.item.input_audio_transcription.completed"
185
+ ),
186
+ "output_transcription": (
187
+ message["type"] == "response.done"
188
+ and message["response"]["status"] == "completed"
189
+ and message["response"]["output"][0]["type"] == "message"
190
+ ),
191
+ }
192
+
193
+ if openai_server_events["function_call"]:
194
+ response = await self.handle_ai_function_call(message)
195
+ event_response = {
196
+ "type": "conversation.item.create",
197
+ "previous_item_id": None,
198
+ "item": {
199
+ "type": "message",
200
+ "role": "user",
201
+ "content": [
202
+ {
203
+ "type": "input_text",
204
+ "text": response,
205
+ }
206
+ ],
207
+ },
208
+ }
209
+ await websocket.send_text(json.dumps(event_response))
210
+ event_response_2 = {"type": "response.create"}
211
+ await websocket.send_text(json.dumps(event_response_2))
212
+
213
+ elif openai_server_events["input_transcription"]:
214
+ user_message = message["transcript"]
215
+ await self.handle_user_message(
216
+ message_content=user_message, conversation_id=conversation_id
217
+ )
218
+
219
+ elif openai_server_events["output_transcription"]:
220
+ ai_response = message["response"]["output"][0]["content"][0][
221
+ "transcript"
222
+ ]
223
+ await self.handle_ai_message(
224
+ message_content=ai_response, conversation_id=conversation_id
225
+ )
226
+
227
+ elif message["type"] == "error":
228
+ logger.error(f"Error: {message['error']}")
229
+
230
+ async def handle_user_message(self, message_content, conversation_id):
231
+ logger.info(f"User Query: {message_content}")
232
+ message = await self.message_repository.insert_one(
233
+ Message(
234
+ conversation_id=conversation_id,
235
+ role="user",
236
+ content=message_content,
237
+ )
238
+ )
239
+
240
+ async def handle_ai_message(self, message_content, conversation_id):
241
+ logger.info(f"AI Response: {message_content}")
242
+ message = await self.message_repository.insert_one(
243
+ Message(
244
+ conversation_id=conversation_id,
245
+ role="assistant",
246
+ content=message_content,
247
+ )
248
+ )
249
+
250
+ async def handle_conversation_summary(self, conversation_id):
251
+ pass
252
+
253
+ async def handle_ai_function_call(self, data):
254
+ tool_name = data["name"]
255
+ arguments = json.loads(data["arguments"])
256
+ logger.info(f"Function call: {tool_name} \nArguments: {arguments}")
257
+
258
+ function_info = self.functions_dictionary.get(tool_name)
259
+ if not function_info:
260
+ raise AttributeError(f"Function {tool_name} not found in dictionary")
261
+
262
+ service_class = function_info["service"]
263
+ async with service_class() as service:
264
+ func = getattr(service, function_info["function"], None)
265
+ if not func:
266
+ raise AttributeError(
267
+ f"No such function {function_info['function']} in service"
268
+ )
269
+ response = await func(arguments["query"])
270
+ response = f"Context : {response}\n\n# Use this context to respond to the user query : {arguments['query']}"
271
+ return response
src/services/{_pinecone_service.py → _file_service.py} RENAMED
@@ -1,15 +1,15 @@
1
- import os
2
- import aiofiles
3
- from asynctempfile import TemporaryDirectory
4
-
5
  from fastapi import UploadFile
6
 
 
 
7
  from src.utils import PineconeClient
8
 
9
 
10
- class PineconeService:
11
  def __init__(self):
12
  self.pinecone_client = PineconeClient
 
 
13
 
14
  async def __aenter__(self):
15
  return self
@@ -17,7 +17,7 @@ class PineconeService:
17
  async def __aexit__(self, *args):
18
  pass
19
 
20
- async def insert_data(self, file: UploadFile):
21
  try:
22
  metadata = [{"file_name": file.filename}]
23
  while content := await file.read():
@@ -25,13 +25,25 @@ class PineconeService:
25
  ids = await client.insert_data(
26
  text=[content.decode()], metadata=metadata
27
  )
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
28
  finally:
29
  await file.close()
30
 
31
- async def similarity_search(self, query: str):
32
  async with self.pinecone_client() as client:
33
  return await client.similarity_search(query=query)
34
-
35
- async def delete(self, file_name: str, file_id: str):
36
- async with self.pinecone_client() as client:
37
- pass
 
 
 
 
 
1
  from fastapi import UploadFile
2
 
3
+ from src.repositories import FileRepository, VectorStoreRecordIdRepository
4
+ from src.models import File, VectorStoreRecordId
5
  from src.utils import PineconeClient
6
 
7
 
8
class FileService:
    # Orchestrates file ingestion: raw text content goes to the Pinecone
    # vector store, while file metadata and the returned vector record ids
    # are persisted through the Mongo repositories.
    def __init__(self):
        # Stored as a class/factory, not an instance: a fresh client is
        # opened per operation via ``async with self.pinecone_client()``.
        self.pinecone_client = PineconeClient
        self.file_repository = FileRepository()
        self.vector_store_record_id_repository = VectorStoreRecordIdRepository()
13
 
14
    async def __aenter__(self):
        # Async context-manager support; no setup is required.
        return self

    async def __aexit__(self, *args):
        # No teardown: Pinecone clients are opened and closed per call.
        pass
19
 
20
+ async def insert_file(self, file: UploadFile, user_id: str):
21
  try:
22
  metadata = [{"file_name": file.filename}]
23
  while content := await file.read():
 
25
  ids = await client.insert_data(
26
  text=[content.decode()], metadata=metadata
27
  )
28
+
29
+ file_data = await self.file_repository.insert_one(
30
+ File(
31
+ user_id=user_id,
32
+ file_name=file.filename,
33
+ file_type=file.content_type,
34
+ )
35
+ )
36
+
37
+ for id in ids:
38
+ await self.vector_store_record_id_repository.insert_one(
39
+ VectorStoreRecordId(file_id=file_data.id, record_id=id)
40
+ )
41
+
42
+ return file_data
43
+
44
  finally:
45
  await file.close()
46
 
47
+ async def semantic_search(self, query):
48
  async with self.pinecone_client() as client:
49
  return await client.similarity_search(query=query)
 
 
 
 
src/utils/_openai_client.py CHANGED
@@ -1,5 +1,8 @@
1
  import os
2
  import httpx
 
 
 
3
  from src.config import logger
4
 
5
 
@@ -25,13 +28,14 @@ _OPENAI_TOOLS = [
25
  class OpenAIClient:
26
  def __init__(self):
27
  self.session_url = "https://api.openai.com/v1/realtime/sessions"
28
- self.model = "gpt-4o-realtime-preview-2024-12-17"
 
29
  self.modalities = ["text", "audio"]
30
  self.voice = "alloy"
31
  self.transcription_model = "whisper-1"
32
  self.temperature = 0.8
33
  self.max_response_output_tokens = 1000
34
- self.system_prompt = "You are a helpful AI assistant. Always provide clear and concise responses to the user's questions.Your responses must be very consize and small"
35
  self.tools = _OPENAI_TOOLS
36
 
37
  async def __aenter__(self):
@@ -63,5 +67,28 @@ class OpenAIClient:
63
  self.session_url, json=payload, headers=headers
64
  )
65
  response.raise_for_status()
66
- logger.info(f"OpenAI session created: {response.json()}")
67
  return response.json()
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
  import os
2
  import httpx
3
+ from fastapi import HTTPException
4
+ from aiortc import RTCPeerConnection, RTCSessionDescription
5
+
6
  from src.config import logger
7
 
8
 
 
28
  class OpenAIClient:
29
  def __init__(self):
30
  self.session_url = "https://api.openai.com/v1/realtime/sessions"
31
+ self.model = "gpt-4o-mini-realtime-preview-2024-12-17"
32
+ self.webrtc_url = f"https://api.openai.com/v1/realtime?model={self.model}"
33
  self.modalities = ["text", "audio"]
34
  self.voice = "alloy"
35
  self.transcription_model = "whisper-1"
36
  self.temperature = 0.8
37
  self.max_response_output_tokens = 1000
38
+ self.system_prompt = "You are a helpful AI assistant. Always provide clear and concise responses to the user's questions.Your responses must be very consize and small. Always use your tools to provide the best possible response to the user.Avoid giving long responses to the user's questions.For any query related to Keep Me, use you tool"
39
  self.tools = _OPENAI_TOOLS
40
 
41
  async def __aenter__(self):
 
67
  self.session_url, json=payload, headers=headers
68
  )
69
  response.raise_for_status()
 
70
  return response.json()
71
+
72
+ async def create_webrtc_connection(self, ephemeral_key, offer):
73
+ peer_connection = RTCPeerConnection()
74
+ offer_desc = RTCSessionDescription(sdp=offer["sdp"], type=offer["type"])
75
+ await peer_connection.setRemoteDescription(offer_desc)
76
+
77
+ async with httpx.AsyncClient() as client:
78
+ headers = {
79
+ "Authorization": f"Bearer {ephemeral_key}",
80
+ "Content-Type": "application/sdp",
81
+ }
82
+ response = await client.post(
83
+ self.webrtc_url, content=offer_desc.sdp, headers=headers
84
+ )
85
+ if not response.is_success:
86
+ error_data = response.json()
87
+ error_message = error_data.get("error", {}).get(
88
+ "message", response.reason_phrase
89
+ )
90
+ raise HTTPException(
91
+ status_code=response.status_code, detail=error_message
92
+ )
93
+ answer_sdp = response.text
94
+ return {"answer": {"sdp": answer_sdp, "type": "answer"}}