Kotta commited on
Commit ·
e8a6b78
1
Parent(s): 8189d77
feature(#9): firebase key management
Browse files- Brain/src/common/utils.py +3 -2
- Brain/src/firebase/cloudmessage.py +71 -59
- Brain/src/firebase/firebase.py +24 -3
- Brain/src/model/req_model.py +1 -1
- Brain/src/model/requests/request_model.py +0 -8
- Brain/src/rising_plugin/guardrails-config/actions/actions.py +11 -33
- Brain/src/rising_plugin/risingplugin.py +88 -55
- Brain/src/router/api.py +27 -5
- Brain/src/router/train_router.py +30 -7
- Brain/src/service/feedback_service.py +7 -1
- Brain/src/service/train_service.py +21 -2
- app.py +0 -2
Brain/src/common/utils.py
CHANGED
|
@@ -3,6 +3,7 @@ import os
|
|
| 3 |
import re
|
| 4 |
|
| 5 |
from firebase_admin import credentials
|
|
|
|
| 6 |
|
| 7 |
# env variables
|
| 8 |
DEFAULT_HOST_NAME = "test3-83ffc.appspot.com"
|
|
@@ -41,14 +42,14 @@ AUTH_TOKEN = os.getenv("TWILIO_AUTH_TOKEN")
|
|
| 41 |
HUGGINGFACEHUB_API_TOKEN = os.getenv("HUGGINGFACEHUB_API_TOKEN")
|
| 42 |
|
| 43 |
|
| 44 |
-
def get_firebase_cred():
|
| 45 |
if os.path.exists("Brain/firebase_cred.json"):
|
| 46 |
file = open("Brain/firebase_cred.json")
|
| 47 |
cred = json.load(file)
|
| 48 |
file.close()
|
| 49 |
return credentials.Certificate(cred)
|
| 50 |
else:
|
| 51 |
-
cred = json.loads(
|
| 52 |
return credentials.Certificate(cred)
|
| 53 |
|
| 54 |
|
|
|
|
| 3 |
import re
|
| 4 |
|
| 5 |
from firebase_admin import credentials
|
| 6 |
+
from Brain.src.model.req_model import ReqModel
|
| 7 |
|
| 8 |
# env variables
|
| 9 |
DEFAULT_HOST_NAME = "test3-83ffc.appspot.com"
|
|
|
|
| 42 |
HUGGINGFACEHUB_API_TOKEN = os.getenv("HUGGINGFACEHUB_API_TOKEN")
|
| 43 |
|
| 44 |
|
| 45 |
+
def get_firebase_cred(setting: ReqModel):
|
| 46 |
if os.path.exists("Brain/firebase_cred.json"):
|
| 47 |
file = open("Brain/firebase_cred.json")
|
| 48 |
cred = json.load(file)
|
| 49 |
file.close()
|
| 50 |
return credentials.Certificate(cred)
|
| 51 |
else:
|
| 52 |
+
cred = json.loads(setting.firebase_key)
|
| 53 |
return credentials.Certificate(cred)
|
| 54 |
|
| 55 |
|
Brain/src/firebase/cloudmessage.py
CHANGED
|
@@ -3,72 +3,84 @@ import sys
|
|
| 3 |
import json
|
| 4 |
|
| 5 |
import traceback
|
| 6 |
-
from
|
| 7 |
|
| 8 |
-
|
|
|
|
| 9 |
|
| 10 |
|
| 11 |
-
|
| 12 |
-
|
| 13 |
-
|
| 14 |
-
|
| 15 |
-
for doc in docs:
|
| 16 |
-
registeration_tokens.append(doc.to_dict()["token"])
|
| 17 |
-
return registeration_tokens
|
| 18 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 19 |
|
| 20 |
-
def exception_detail(e):
|
| 21 |
-
|
| 22 |
-
|
| 23 |
-
|
| 24 |
-
|
| 25 |
-
|
| 26 |
-
|
| 27 |
-
|
| 28 |
-
|
| 29 |
-
|
| 30 |
-
|
| 31 |
-
|
| 32 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 33 |
|
| 34 |
-
|
| 35 |
-
|
| 36 |
-
|
| 37 |
-
|
| 38 |
-
|
| 39 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 40 |
)
|
| 41 |
-
|
| 42 |
-
|
| 43 |
-
|
| 44 |
-
default_sound=True,
|
| 45 |
)
|
| 46 |
-
|
| 47 |
-
|
| 48 |
-
|
| 49 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 50 |
)
|
| 51 |
-
|
| 52 |
-
multi_msg = messaging.MulticastMessage(
|
| 53 |
-
notification=notify,
|
| 54 |
-
tokens=token_list,
|
| 55 |
-
data={} if "route" not in notification else {"direct": notification["route"]},
|
| 56 |
-
android=messaging.AndroidConfig(notification=android_notify, priority="high"),
|
| 57 |
-
apns=messaging.APNSConfig(
|
| 58 |
-
payload=messaging.APNSPayload(
|
| 59 |
-
messaging.Aps(sound=messaging.CriticalSound("default", volume=1.0))
|
| 60 |
-
)
|
| 61 |
-
),
|
| 62 |
-
)
|
| 63 |
-
response = messaging.send_multicast(multi_msg)
|
| 64 |
-
failed_tokens = []
|
| 65 |
-
if response.failure_count > 0:
|
| 66 |
-
responses = response.responses
|
| 67 |
-
for idx, resp in enumerate(responses):
|
| 68 |
-
if not resp.success:
|
| 69 |
-
# The order of responses corresponds to the order of the registration tokens.
|
| 70 |
-
failed_tokens.append(token_list[idx])
|
| 71 |
-
print("List of tokens that caused failures: {0}".format(failed_tokens))
|
| 72 |
-
return True, "send to {} devices, with {} successed, with {} failed.".format(
|
| 73 |
-
len(token_list), response.success_count, response.failure_count
|
| 74 |
-
)
|
|
|
|
| 3 |
import json
|
| 4 |
|
| 5 |
import traceback
|
| 6 |
+
from typing import Any
|
| 7 |
|
| 8 |
+
import firebase_admin
|
| 9 |
+
from firebase_admin import messaging, firestore
|
| 10 |
|
| 11 |
|
| 12 |
+
class CloudMessage:
|
| 13 |
+
def __init__(self, firebase_app: firebase_admin.App):
|
| 14 |
+
self.firebase_app = firebase_app
|
| 15 |
+
self.db = firestore.client(app=firebase_app)
|
|
|
|
|
|
|
|
|
|
| 16 |
|
| 17 |
+
def get_tokens(self):
|
| 18 |
+
users_ref = self.db.collection("users")
|
| 19 |
+
docs = users_ref.stream()
|
| 20 |
+
registeration_tokens = []
|
| 21 |
+
for doc in docs:
|
| 22 |
+
registeration_tokens.append(doc.to_dict()["token"])
|
| 23 |
+
return registeration_tokens
|
| 24 |
|
| 25 |
+
def exception_detail(self, e):
|
| 26 |
+
error_class = e.__class__.__name__
|
| 27 |
+
detail = e.args[0]
|
| 28 |
+
cl, exc, tb = sys.exc_info()
|
| 29 |
+
lastCallStack = traceback.extract_tb(tb)[-1]
|
| 30 |
+
fileName = lastCallStack[0]
|
| 31 |
+
lineNum = lastCallStack[1]
|
| 32 |
+
funcName = lastCallStack[2]
|
| 33 |
+
errMsg = 'File "{}", line {}, in {}: [{}] {}'.format(
|
| 34 |
+
fileName, lineNum, funcName, error_class, detail
|
| 35 |
+
)
|
| 36 |
+
return errMsg
|
| 37 |
|
| 38 |
+
def send_message(self, notification, token_list):
|
| 39 |
+
if token_list == []:
|
| 40 |
+
return False, "token_list empty"
|
| 41 |
+
if notification.get("title") not in [None, ""]:
|
| 42 |
+
notify = messaging.Notification(
|
| 43 |
+
title=notification.get("title"),
|
| 44 |
+
body=notification.get("content", ""),
|
| 45 |
+
)
|
| 46 |
+
android_notify = messaging.AndroidNotification(
|
| 47 |
+
title=notification.get("title"),
|
| 48 |
+
body=notification.get("content", ""),
|
| 49 |
+
default_sound=True,
|
| 50 |
+
)
|
| 51 |
+
else:
|
| 52 |
+
notify = messaging.Notification(body=notification.get("content", ""))
|
| 53 |
+
android_notify = messaging.AndroidNotification(
|
| 54 |
+
body=notification.get("content", ""), default_sound=True
|
| 55 |
+
)
|
| 56 |
|
| 57 |
+
multi_msg = messaging.MulticastMessage(
|
| 58 |
+
notification=notify,
|
| 59 |
+
tokens=token_list,
|
| 60 |
+
data={}
|
| 61 |
+
if "route" not in notification
|
| 62 |
+
else {"direct": notification["route"]},
|
| 63 |
+
android=messaging.AndroidConfig(
|
| 64 |
+
notification=android_notify, priority="high"
|
| 65 |
+
),
|
| 66 |
+
apns=messaging.APNSConfig(
|
| 67 |
+
payload=messaging.APNSPayload(
|
| 68 |
+
messaging.Aps(sound=messaging.CriticalSound("default", volume=1.0))
|
| 69 |
+
)
|
| 70 |
+
),
|
| 71 |
)
|
| 72 |
+
response = messaging.send_multicast(
|
| 73 |
+
multi_msg,
|
| 74 |
+
app=self.firebase_app,
|
|
|
|
| 75 |
)
|
| 76 |
+
failed_tokens = []
|
| 77 |
+
if response.failure_count > 0:
|
| 78 |
+
responses = response.responses
|
| 79 |
+
for idx, resp in enumerate(responses):
|
| 80 |
+
if not resp.success:
|
| 81 |
+
# The order of responses corresponds to the order of the registration tokens.
|
| 82 |
+
failed_tokens.append(token_list[idx])
|
| 83 |
+
print("List of tokens that caused failures: {0}".format(failed_tokens))
|
| 84 |
+
return True, "send to {} devices, with {} successed, with {} failed.".format(
|
| 85 |
+
len(token_list), response.success_count, response.failure_count
|
| 86 |
)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
Brain/src/firebase/firebase.py
CHANGED
|
@@ -1,8 +1,29 @@
|
|
|
|
|
|
|
|
| 1 |
import firebase_admin
|
|
|
|
|
|
|
| 2 |
from Brain.src.common.utils import get_firebase_cred, FIREBASE_STORAGE_BUCKET
|
|
|
|
|
|
|
| 3 |
|
| 4 |
|
| 5 |
-
def initialize_app():
|
| 6 |
-
firebase_admin.initialize_app(
|
| 7 |
-
get_firebase_cred(),
|
|
|
|
|
|
|
| 8 |
)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from typing import Any
|
| 2 |
+
|
| 3 |
import firebase_admin
|
| 4 |
+
|
| 5 |
+
from Brain.src.common.assembler import Assembler
|
| 6 |
from Brain.src.common.utils import get_firebase_cred, FIREBASE_STORAGE_BUCKET
|
| 7 |
+
from Brain.src.model.req_model import ReqModel
|
| 8 |
+
from Brain.src.model.requests.request_model import BasicReq
|
| 9 |
|
| 10 |
|
| 11 |
+
def initialize_app(setting: ReqModel) -> firebase_admin.App:
|
| 12 |
+
return firebase_admin.initialize_app(
|
| 13 |
+
get_firebase_cred(setting),
|
| 14 |
+
{"storageBucket": FIREBASE_STORAGE_BUCKET},
|
| 15 |
+
name=get_firebase_admin_name(setting.uuid),
|
| 16 |
)
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
def get_firebase_admin_name(uuid: str = ""):
|
| 20 |
+
return f"firebase_admin_{uuid}"
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
def firebase_admin_with_setting(data: BasicReq):
|
| 24 |
+
# firebase admin init
|
| 25 |
+
assembler = Assembler()
|
| 26 |
+
setting = assembler.to_req_model(data)
|
| 27 |
+
|
| 28 |
+
firebase_app = initialize_app(setting)
|
| 29 |
+
return setting, firebase_app
|
Brain/src/model/req_model.py
CHANGED
|
@@ -1,6 +1,6 @@
|
|
| 1 |
from typing import Any
|
| 2 |
|
| 3 |
-
|
| 4 |
|
| 5 |
|
| 6 |
class ReqModel:
|
|
|
|
| 1 |
from typing import Any
|
| 2 |
|
| 3 |
+
DEFAULT_HOST_NAME = "test3-83ffc.appspot.com"
|
| 4 |
|
| 5 |
|
| 6 |
class ReqModel:
|
Brain/src/model/requests/request_model.py
CHANGED
|
@@ -4,14 +4,6 @@ from pydantic import BaseModel
|
|
| 4 |
from fastapi import Depends, Request, HTTPException
|
| 5 |
from user_agents import parse
|
| 6 |
|
| 7 |
-
from Brain.src.common.utils import (
|
| 8 |
-
DEFAULT_HOST_NAME,
|
| 9 |
-
OPENAI_API_KEY,
|
| 10 |
-
PINECONE_KEY,
|
| 11 |
-
FIREBASE_ENV,
|
| 12 |
-
PINECONE_ENV,
|
| 13 |
-
)
|
| 14 |
-
|
| 15 |
"""user-agent management"""
|
| 16 |
|
| 17 |
|
|
|
|
| 4 |
from fastapi import Depends, Request, HTTPException
|
| 5 |
from user_agents import parse
|
| 6 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 7 |
"""user-agent management"""
|
| 8 |
|
| 9 |
|
Brain/src/rising_plugin/guardrails-config/actions/actions.py
CHANGED
|
@@ -80,46 +80,24 @@ query is json string with below format
|
|
| 80 |
async def general_question(query):
|
| 81 |
"""init falcon model"""
|
| 82 |
falcon_llm = FalconLLM()
|
| 83 |
-
|
| 84 |
-
|
| 85 |
-
|
| 86 |
try:
|
| 87 |
json_query = json.loads(query)
|
| 88 |
except Exception as ex:
|
| 89 |
raise BrainException(BrainException.JSON_PARSING_ISSUE_MSG)
|
| 90 |
-
"""step 0-->: parsing parms from the json query"""
|
| 91 |
query = json_query["query"]
|
| 92 |
-
model = json_query["model"]
|
| 93 |
-
uuid = json_query["uuid"]
|
| 94 |
image_search = json_query["image_search"]
|
|
|
|
|
|
|
| 95 |
setting = ReqModel(json_query["setting"])
|
| 96 |
|
| 97 |
-
|
| 98 |
-
|
| 99 |
-
query_result = get_embed(query)
|
| 100 |
-
relatedness_data = index.query(
|
| 101 |
-
vector=query_result,
|
| 102 |
-
top_k=1,
|
| 103 |
-
include_values=False,
|
| 104 |
-
namespace=train_service.get_pinecone_index_train_namespace(),
|
| 105 |
-
)
|
| 106 |
-
|
| 107 |
-
if len(relatedness_data["matches"]) == 0:
|
| 108 |
-
return str({"program": "message", "content": ""})
|
| 109 |
-
document_id = relatedness_data["matches"][0]["id"]
|
| 110 |
-
docs = []
|
| 111 |
-
document = train_service.read_one_document(document_id)
|
| 112 |
-
docs.append(Document(page_content=document["page_content"], metadata=""))
|
| 113 |
-
|
| 114 |
""" 1. calling gpt model to categorize for all message"""
|
| 115 |
-
|
| 116 |
-
|
| 117 |
-
|
| 118 |
-
)
|
| 119 |
-
else:
|
| 120 |
-
chain_data = get_llm_chain(model=DEFAULT_GPT_MODEL, setting=setting).run(
|
| 121 |
-
input_documents=docs, question=query
|
| 122 |
-
)
|
| 123 |
try:
|
| 124 |
result = json.loads(chain_data)
|
| 125 |
# check image query with only its text
|
|
@@ -130,8 +108,8 @@ async def general_question(query):
|
|
| 130 |
}
|
| 131 |
""" 2. check program is message to handle it with falcon llm """
|
| 132 |
if result["program"] == "message":
|
| 133 |
-
|
| 134 |
-
|
| 135 |
return str(result)
|
| 136 |
except ValueError as e:
|
| 137 |
# Check sms and browser query
|
|
|
|
| 80 |
async def general_question(query):
|
| 81 |
"""init falcon model"""
|
| 82 |
falcon_llm = FalconLLM()
|
| 83 |
+
docs = []
|
| 84 |
+
|
| 85 |
+
"""step 0-->: parsing parms from the json query"""
|
| 86 |
try:
|
| 87 |
json_query = json.loads(query)
|
| 88 |
except Exception as ex:
|
| 89 |
raise BrainException(BrainException.JSON_PARSING_ISSUE_MSG)
|
|
|
|
| 90 |
query = json_query["query"]
|
|
|
|
|
|
|
| 91 |
image_search = json_query["image_search"]
|
| 92 |
+
page_content = json_query["page_content"]
|
| 93 |
+
document_id = json_query["document_id"]
|
| 94 |
setting = ReqModel(json_query["setting"])
|
| 95 |
|
| 96 |
+
docs.append(Document(page_content=page_content, metadata=""))
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 97 |
""" 1. calling gpt model to categorize for all message"""
|
| 98 |
+
chain_data = get_llm_chain(model=DEFAULT_GPT_MODEL, setting=setting).run(
|
| 99 |
+
input_documents=docs, question=query
|
| 100 |
+
)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 101 |
try:
|
| 102 |
result = json.loads(chain_data)
|
| 103 |
# check image query with only its text
|
|
|
|
| 108 |
}
|
| 109 |
""" 2. check program is message to handle it with falcon llm """
|
| 110 |
if result["program"] == "message":
|
| 111 |
+
"""FALCON_7B:"""
|
| 112 |
+
result["content"] = falcon_llm.query(question=query)
|
| 113 |
return str(result)
|
| 114 |
except ValueError as e:
|
| 115 |
# Check sms and browser query
|
Brain/src/rising_plugin/risingplugin.py
CHANGED
|
@@ -1,12 +1,15 @@
|
|
| 1 |
import os
|
| 2 |
import json
|
| 3 |
import datetime
|
|
|
|
|
|
|
| 4 |
import openai
|
| 5 |
import replicate
|
| 6 |
import textwrap
|
| 7 |
|
| 8 |
from typing import Any
|
| 9 |
|
|
|
|
| 10 |
from langchain.chains.question_answering import load_qa_chain
|
| 11 |
from nemoguardrails.rails import LLMRails, RailsConfig
|
| 12 |
|
|
@@ -14,12 +17,16 @@ from langchain.chat_models import ChatOpenAI
|
|
| 14 |
|
| 15 |
from firebase_admin import storage
|
| 16 |
|
|
|
|
| 17 |
from .llm.llms import get_llm, GPT_4, FALCON_7B
|
|
|
|
|
|
|
| 18 |
from ..common.utils import (
|
| 19 |
OPENAI_API_KEY,
|
| 20 |
FIREBASE_STORAGE_ROOT,
|
| 21 |
DEFAULT_GPT_MODEL,
|
| 22 |
parseJsonFromCompletion,
|
|
|
|
| 23 |
)
|
| 24 |
from .image_embedding import (
|
| 25 |
query_image_text,
|
|
@@ -27,6 +34,7 @@ from .image_embedding import (
|
|
| 27 |
)
|
| 28 |
from ..model.req_model import ReqModel
|
| 29 |
from ..model.requests.request_model import BasicReq
|
|
|
|
| 30 |
|
| 31 |
# Give the path to the folder containing the rails
|
| 32 |
file_path = os.path.dirname(os.path.abspath(__file__))
|
|
@@ -42,28 +50,65 @@ def getChunks(query: str):
|
|
| 42 |
)
|
| 43 |
|
| 44 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 45 |
def processLargeText(
|
| 46 |
setting: ReqModel,
|
| 47 |
app: any,
|
| 48 |
chunks: any,
|
| 49 |
-
|
| 50 |
-
uuid: str = "",
|
| 51 |
image_search: bool = True,
|
| 52 |
):
|
| 53 |
if len(chunks) == 1:
|
| 54 |
-
message =
|
| 55 |
-
|
| 56 |
-
|
| 57 |
-
|
| 58 |
-
|
| 59 |
-
|
| 60 |
-
query=chunks[0],
|
| 61 |
-
model=model,
|
| 62 |
-
uuid=uuid,
|
| 63 |
-
image_search=image_search,
|
| 64 |
-
),
|
| 65 |
-
}
|
| 66 |
-
]
|
| 67 |
)
|
| 68 |
result = json.dumps(message["content"])
|
| 69 |
return parseJsonFromCompletion(result)
|
|
@@ -92,19 +137,12 @@ def processLargeText(
|
|
| 92 |
+ "]\n"
|
| 93 |
+ "Remember not answering yet. Just acknowledge you received this part with the message 'Part 1/10 received' and wait for the next part."
|
| 94 |
)
|
| 95 |
-
|
| 96 |
-
|
| 97 |
-
|
| 98 |
-
|
| 99 |
-
|
| 100 |
-
|
| 101 |
-
query=chunk_query,
|
| 102 |
-
model=model,
|
| 103 |
-
uuid=uuid,
|
| 104 |
-
image_search=image_search,
|
| 105 |
-
),
|
| 106 |
-
}
|
| 107 |
-
]
|
| 108 |
)
|
| 109 |
else:
|
| 110 |
last_query = (
|
|
@@ -120,19 +158,12 @@ def processLargeText(
|
|
| 120 |
+ "]\n"
|
| 121 |
+ "ALL PART SENT. Now you can continue processing the request."
|
| 122 |
)
|
| 123 |
-
message =
|
| 124 |
-
|
| 125 |
-
|
| 126 |
-
|
| 127 |
-
|
| 128 |
-
|
| 129 |
-
query=last_query,
|
| 130 |
-
model=model,
|
| 131 |
-
uuid=uuid,
|
| 132 |
-
image_search=image_search,
|
| 133 |
-
),
|
| 134 |
-
}
|
| 135 |
-
]
|
| 136 |
)
|
| 137 |
result = json.dumps(message["content"])
|
| 138 |
return parseJsonFromCompletion(result)
|
|
@@ -142,8 +173,7 @@ def processLargeText(
|
|
| 142 |
def getCompletion(
|
| 143 |
query: str,
|
| 144 |
setting: ReqModel,
|
| 145 |
-
|
| 146 |
-
uuid: str = "",
|
| 147 |
image_search: bool = True,
|
| 148 |
):
|
| 149 |
llm = get_llm(model=DEFAULT_GPT_MODEL, setting=setting).get_llm()
|
|
@@ -157,9 +187,8 @@ def getCompletion(
|
|
| 157 |
setting=setting,
|
| 158 |
app=app,
|
| 159 |
chunks=chunks,
|
| 160 |
-
model=model,
|
| 161 |
-
uuid=uuid,
|
| 162 |
image_search=image_search,
|
|
|
|
| 163 |
)
|
| 164 |
|
| 165 |
|
|
@@ -173,12 +202,10 @@ def getCompletionOnly(
|
|
| 173 |
return chain_data
|
| 174 |
|
| 175 |
|
| 176 |
-
def query_image_ask(image_content, message,
|
| 177 |
prompt_template = get_prompt_image_with_message(image_content, message)
|
| 178 |
try:
|
| 179 |
-
data = getCompletion(
|
| 180 |
-
query=prompt_template, uuid=uuid, image_search=False, setting=setting
|
| 181 |
-
)
|
| 182 |
# chain_data = json.loads(data.replace("'", '"'))
|
| 183 |
# chain_data = json.loads(data)
|
| 184 |
if data["program"] == "image":
|
|
@@ -233,15 +260,17 @@ response:
|
|
| 233 |
|
| 234 |
|
| 235 |
# Define a content filter function
|
| 236 |
-
def filter_guardrails(setting:
|
| 237 |
-
llm = ChatOpenAI(
|
|
|
|
|
|
|
| 238 |
app = LLMRails(config, llm)
|
| 239 |
|
| 240 |
# split query with chunks
|
| 241 |
chunks = getChunks(query)
|
| 242 |
|
| 243 |
# get message from guardrails
|
| 244 |
-
message = processLargeText(app=app, chunks=chunks,
|
| 245 |
|
| 246 |
if (
|
| 247 |
message
|
|
@@ -260,14 +289,18 @@ compose json_string for rails input with its arguments
|
|
| 260 |
|
| 261 |
|
| 262 |
def rails_input_with_args(
|
| 263 |
-
setting: ReqModel,
|
|
|
|
|
|
|
|
|
|
|
|
|
| 264 |
) -> str:
|
| 265 |
# convert json with params for rails.
|
| 266 |
json_query_with_params = {
|
| 267 |
"query": query,
|
| 268 |
-
"model": model,
|
| 269 |
-
"uuid": uuid,
|
| 270 |
"image_search": image_search,
|
|
|
|
|
|
|
| 271 |
"setting": setting.to_json(),
|
| 272 |
}
|
| 273 |
return json.dumps(json_query_with_params)
|
|
|
|
| 1 |
import os
|
| 2 |
import json
|
| 3 |
import datetime
|
| 4 |
+
|
| 5 |
+
import firebase_admin
|
| 6 |
import openai
|
| 7 |
import replicate
|
| 8 |
import textwrap
|
| 9 |
|
| 10 |
from typing import Any
|
| 11 |
|
| 12 |
+
from langchain import LLMChain
|
| 13 |
from langchain.chains.question_answering import load_qa_chain
|
| 14 |
from nemoguardrails.rails import LLMRails, RailsConfig
|
| 15 |
|
|
|
|
| 17 |
|
| 18 |
from firebase_admin import storage
|
| 19 |
|
| 20 |
+
from .csv_embed import get_embed
|
| 21 |
from .llm.llms import get_llm, GPT_4, FALCON_7B
|
| 22 |
+
from .pinecone_engine import init_pinecone
|
| 23 |
+
from ..common.brain_exception import BrainException
|
| 24 |
from ..common.utils import (
|
| 25 |
OPENAI_API_KEY,
|
| 26 |
FIREBASE_STORAGE_ROOT,
|
| 27 |
DEFAULT_GPT_MODEL,
|
| 28 |
parseJsonFromCompletion,
|
| 29 |
+
PINECONE_INDEX_NAME,
|
| 30 |
)
|
| 31 |
from .image_embedding import (
|
| 32 |
query_image_text,
|
|
|
|
| 34 |
)
|
| 35 |
from ..model.req_model import ReqModel
|
| 36 |
from ..model.requests.request_model import BasicReq
|
| 37 |
+
from ..service.train_service import TrainService
|
| 38 |
|
| 39 |
# Give the path to the folder containing the rails
|
| 40 |
file_path = os.path.dirname(os.path.abspath(__file__))
|
|
|
|
| 50 |
)
|
| 51 |
|
| 52 |
|
| 53 |
+
def llm_rails(
|
| 54 |
+
setting: ReqModel,
|
| 55 |
+
rails_app: any,
|
| 56 |
+
firebase_app: firebase_admin.App,
|
| 57 |
+
query: str,
|
| 58 |
+
image_search: bool = True,
|
| 59 |
+
) -> Any:
|
| 60 |
+
"""step 0: convert string to json"""
|
| 61 |
+
index = init_pinecone(PINECONE_INDEX_NAME)
|
| 62 |
+
train_service = TrainService(firebase_app=firebase_app)
|
| 63 |
+
|
| 64 |
+
"""step 1: handle with gpt-4"""
|
| 65 |
+
|
| 66 |
+
query_result = get_embed(query)
|
| 67 |
+
relatedness_data = index.query(
|
| 68 |
+
vector=query_result,
|
| 69 |
+
top_k=1,
|
| 70 |
+
include_values=False,
|
| 71 |
+
namespace=train_service.get_pinecone_index_train_namespace(),
|
| 72 |
+
)
|
| 73 |
+
|
| 74 |
+
if len(relatedness_data["matches"]) == 0:
|
| 75 |
+
return str({"program": "message", "content": ""})
|
| 76 |
+
document_id = relatedness_data["matches"][0]["id"]
|
| 77 |
+
|
| 78 |
+
document = train_service.read_one_document(document_id)
|
| 79 |
+
page_content = document["page_content"]
|
| 80 |
+
|
| 81 |
+
message = rails_app.generate(
|
| 82 |
+
messages=[
|
| 83 |
+
{
|
| 84 |
+
"role": "user",
|
| 85 |
+
"content": rails_input_with_args(
|
| 86 |
+
setting=setting,
|
| 87 |
+
query=query,
|
| 88 |
+
image_search=image_search,
|
| 89 |
+
page_content=page_content,
|
| 90 |
+
document_id=document_id,
|
| 91 |
+
),
|
| 92 |
+
}
|
| 93 |
+
]
|
| 94 |
+
)
|
| 95 |
+
return message
|
| 96 |
+
|
| 97 |
+
|
| 98 |
def processLargeText(
|
| 99 |
setting: ReqModel,
|
| 100 |
app: any,
|
| 101 |
chunks: any,
|
| 102 |
+
firebase_app: firebase_admin.App,
|
|
|
|
| 103 |
image_search: bool = True,
|
| 104 |
):
|
| 105 |
if len(chunks) == 1:
|
| 106 |
+
message = llm_rails(
|
| 107 |
+
setting=setting,
|
| 108 |
+
rails_app=app,
|
| 109 |
+
firebase_app=firebase_app,
|
| 110 |
+
query=chunks[0],
|
| 111 |
+
image_search=image_search,
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 112 |
)
|
| 113 |
result = json.dumps(message["content"])
|
| 114 |
return parseJsonFromCompletion(result)
|
|
|
|
| 137 |
+ "]\n"
|
| 138 |
+ "Remember not answering yet. Just acknowledge you received this part with the message 'Part 1/10 received' and wait for the next part."
|
| 139 |
)
|
| 140 |
+
llm_rails(
|
| 141 |
+
setting=setting,
|
| 142 |
+
rails_app=app,
|
| 143 |
+
firebase_app=firebase_app,
|
| 144 |
+
query=chunk_query,
|
| 145 |
+
image_search=image_search,
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 146 |
)
|
| 147 |
else:
|
| 148 |
last_query = (
|
|
|
|
| 158 |
+ "]\n"
|
| 159 |
+ "ALL PART SENT. Now you can continue processing the request."
|
| 160 |
)
|
| 161 |
+
message = llm_rails(
|
| 162 |
+
setting=setting,
|
| 163 |
+
rails_app=app,
|
| 164 |
+
firebase_app=firebase_app,
|
| 165 |
+
query=last_query,
|
| 166 |
+
image_search=image_search,
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 167 |
)
|
| 168 |
result = json.dumps(message["content"])
|
| 169 |
return parseJsonFromCompletion(result)
|
|
|
|
| 173 |
def getCompletion(
|
| 174 |
query: str,
|
| 175 |
setting: ReqModel,
|
| 176 |
+
firebase_app: firebase_admin.App,
|
|
|
|
| 177 |
image_search: bool = True,
|
| 178 |
):
|
| 179 |
llm = get_llm(model=DEFAULT_GPT_MODEL, setting=setting).get_llm()
|
|
|
|
| 187 |
setting=setting,
|
| 188 |
app=app,
|
| 189 |
chunks=chunks,
|
|
|
|
|
|
|
| 190 |
image_search=image_search,
|
| 191 |
+
firebase_app=firebase_app,
|
| 192 |
)
|
| 193 |
|
| 194 |
|
|
|
|
| 202 |
return chain_data
|
| 203 |
|
| 204 |
|
| 205 |
+
def query_image_ask(image_content, message, setting: ReqModel):
|
| 206 |
prompt_template = get_prompt_image_with_message(image_content, message)
|
| 207 |
try:
|
| 208 |
+
data = getCompletion(query=prompt_template, image_search=False, setting=setting)
|
|
|
|
|
|
|
| 209 |
# chain_data = json.loads(data.replace("'", '"'))
|
| 210 |
# chain_data = json.loads(data)
|
| 211 |
if data["program"] == "image":
|
|
|
|
| 260 |
|
| 261 |
|
| 262 |
# Define a content filter function
|
| 263 |
+
def filter_guardrails(setting: ReqModel, query: str):
|
| 264 |
+
llm = ChatOpenAI(
|
| 265 |
+
model_name=DEFAULT_GPT_MODEL, temperature=0, openai_api_key=setting.openai_key
|
| 266 |
+
)
|
| 267 |
app = LLMRails(config, llm)
|
| 268 |
|
| 269 |
# split query with chunks
|
| 270 |
chunks = getChunks(query)
|
| 271 |
|
| 272 |
# get message from guardrails
|
| 273 |
+
message = processLargeText(app=app, chunks=chunks, setting=setting)
|
| 274 |
|
| 275 |
if (
|
| 276 |
message
|
|
|
|
| 289 |
|
| 290 |
|
| 291 |
def rails_input_with_args(
|
| 292 |
+
setting: ReqModel,
|
| 293 |
+
query: str,
|
| 294 |
+
image_search: bool,
|
| 295 |
+
page_content: str,
|
| 296 |
+
document_id: str,
|
| 297 |
) -> str:
|
| 298 |
# convert json with params for rails.
|
| 299 |
json_query_with_params = {
|
| 300 |
"query": query,
|
|
|
|
|
|
|
| 301 |
"image_search": image_search,
|
| 302 |
+
"page_content": page_content,
|
| 303 |
+
"document_id": document_id,
|
| 304 |
"setting": setting.to_json(),
|
| 305 |
}
|
| 306 |
return json.dumps(json_query_with_params)
|
Brain/src/router/api.py
CHANGED
|
@@ -4,6 +4,11 @@ import os
|
|
| 4 |
from Brain.src.common.assembler import Assembler
|
| 5 |
from Brain.src.common.brain_exception import BrainException
|
| 6 |
from Brain.src.common.utils import ProgramType
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 7 |
from Brain.src.model.image_model import ImageModel
|
| 8 |
from Brain.src.model.requests.request_model import (
|
| 9 |
Notification,
|
|
@@ -23,7 +28,7 @@ from Brain.src.rising_plugin.risingplugin import (
|
|
| 23 |
query_image_ask,
|
| 24 |
handle_chat_completion,
|
| 25 |
)
|
| 26 |
-
from Brain.src.firebase.cloudmessage import
|
| 27 |
from Brain.src.rising_plugin.image_embedding import embed_image_text, query_image_text
|
| 28 |
|
| 29 |
from Brain.src.logs import logger
|
|
@@ -60,6 +65,13 @@ def construct_blueprint_api() -> APIRouter:
|
|
| 60 |
def send_notification(
|
| 61 |
data: Notification, client_info: ClientInfo = Depends(get_client_info)
|
| 62 |
):
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 63 |
query = data.message
|
| 64 |
token = data.token
|
| 65 |
uuid = data.uuid
|
|
@@ -68,7 +80,7 @@ def construct_blueprint_api() -> APIRouter:
|
|
| 68 |
query = f"{query} in a web browser"
|
| 69 |
|
| 70 |
result = getCompletion(
|
| 71 |
-
query=query,
|
| 72 |
)
|
| 73 |
|
| 74 |
# check contact querying
|
|
@@ -82,7 +94,7 @@ def construct_blueprint_api() -> APIRouter:
|
|
| 82 |
|
| 83 |
notification = {"title": "alert", "content": json.dumps(result)}
|
| 84 |
|
| 85 |
-
state, value = send_message(notification, [token])
|
| 86 |
return assembler.to_response(200, value, result)
|
| 87 |
except Exception as e:
|
| 88 |
logger.error(
|
|
@@ -105,6 +117,13 @@ def construct_blueprint_api() -> APIRouter:
|
|
| 105 |
|
| 106 |
@router.post("/uploadImage")
|
| 107 |
def upload_image(data: UploadImage):
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 108 |
image_model = ImageModel()
|
| 109 |
token = data.token
|
| 110 |
|
|
@@ -118,7 +137,7 @@ def construct_blueprint_api() -> APIRouter:
|
|
| 118 |
|
| 119 |
notification = {"title": "alert", "content": embed_result}
|
| 120 |
|
| 121 |
-
state, value = send_message(notification, [token])
|
| 122 |
return assembler.to_response(200, value, image_model.to_json())
|
| 123 |
|
| 124 |
"""@generator.response(
|
|
@@ -135,6 +154,9 @@ def construct_blueprint_api() -> APIRouter:
|
|
| 135 |
|
| 136 |
@router.post("/image_relatedness")
|
| 137 |
def image_relatedness(data: ImageRelatedness):
|
|
|
|
|
|
|
|
|
|
| 138 |
image_name = data.image_name
|
| 139 |
message = data.message
|
| 140 |
token = data.token
|
|
@@ -157,7 +179,7 @@ def construct_blueprint_api() -> APIRouter:
|
|
| 157 |
print("image_relatedness parsing error for message chain data")
|
| 158 |
|
| 159 |
notification = {"title": "alert", "content": json.dumps(image_response)}
|
| 160 |
-
state, value = send_message(notification, [token])
|
| 161 |
|
| 162 |
return assembler.to_response(
|
| 163 |
code=200,
|
|
|
|
| 4 |
from Brain.src.common.assembler import Assembler
|
| 5 |
from Brain.src.common.brain_exception import BrainException
|
| 6 |
from Brain.src.common.utils import ProgramType
|
| 7 |
+
from Brain.src.firebase.firebase import (
|
| 8 |
+
initialize_app,
|
| 9 |
+
get_firebase_admin_name,
|
| 10 |
+
firebase_admin_with_setting,
|
| 11 |
+
)
|
| 12 |
from Brain.src.model.image_model import ImageModel
|
| 13 |
from Brain.src.model.requests.request_model import (
|
| 14 |
Notification,
|
|
|
|
| 28 |
query_image_ask,
|
| 29 |
handle_chat_completion,
|
| 30 |
)
|
| 31 |
+
from Brain.src.firebase.cloudmessage import CloudMessage
|
| 32 |
from Brain.src.rising_plugin.image_embedding import embed_image_text, query_image_text
|
| 33 |
|
| 34 |
from Brain.src.logs import logger
|
|
|
|
| 65 |
def send_notification(
|
| 66 |
data: Notification, client_info: ClientInfo = Depends(get_client_info)
|
| 67 |
):
|
| 68 |
+
# firebase admin init
|
| 69 |
+
setting, firebase_app = firebase_admin_with_setting(data)
|
| 70 |
+
|
| 71 |
+
# cloud message
|
| 72 |
+
cloud_message = CloudMessage(firebase_app=firebase_app)
|
| 73 |
+
|
| 74 |
+
# parsing params
|
| 75 |
query = data.message
|
| 76 |
token = data.token
|
| 77 |
uuid = data.uuid
|
|
|
|
| 80 |
query = f"{query} in a web browser"
|
| 81 |
|
| 82 |
result = getCompletion(
|
| 83 |
+
query=query, setting=assembler.to_req_model(data), firebase_app=firebase_app
|
| 84 |
)
|
| 85 |
|
| 86 |
# check contact querying
|
|
|
|
| 94 |
|
| 95 |
notification = {"title": "alert", "content": json.dumps(result)}
|
| 96 |
|
| 97 |
+
state, value = cloud_message.send_message(notification, [token])
|
| 98 |
return assembler.to_response(200, value, result)
|
| 99 |
except Exception as e:
|
| 100 |
logger.error(
|
|
|
|
| 117 |
|
| 118 |
@router.post("/uploadImage")
|
| 119 |
def upload_image(data: UploadImage):
|
| 120 |
+
# cloud message
|
| 121 |
+
cloud_message = CloudMessage()
|
| 122 |
+
# parsing params
|
| 123 |
+
setting = assembler.to_req_model(data)
|
| 124 |
+
# firebase admin init
|
| 125 |
+
initialize_app(setting)
|
| 126 |
+
|
| 127 |
image_model = ImageModel()
|
| 128 |
token = data.token
|
| 129 |
|
|
|
|
| 137 |
|
| 138 |
notification = {"title": "alert", "content": embed_result}
|
| 139 |
|
| 140 |
+
state, value = cloud_message.send_message(notification, [token])
|
| 141 |
return assembler.to_response(200, value, image_model.to_json())
|
| 142 |
|
| 143 |
"""@generator.response(
|
|
|
|
| 154 |
|
| 155 |
@router.post("/image_relatedness")
|
| 156 |
def image_relatedness(data: ImageRelatedness):
|
| 157 |
+
# cloud message
|
| 158 |
+
cloud_message = CloudMessage()
|
| 159 |
+
# parsing params
|
| 160 |
image_name = data.image_name
|
| 161 |
message = data.message
|
| 162 |
token = data.token
|
|
|
|
| 179 |
print("image_relatedness parsing error for message chain data")
|
| 180 |
|
| 181 |
notification = {"title": "alert", "content": json.dumps(image_response)}
|
| 182 |
+
state, value = cloud_message.send_message(notification, [token])
|
| 183 |
|
| 184 |
return assembler.to_response(
|
| 185 |
code=200,
|
Brain/src/router/train_router.py
CHANGED
|
@@ -1,8 +1,10 @@
|
|
| 1 |
from fastapi import APIRouter
|
| 2 |
|
| 3 |
from Brain.src.common.assembler import Assembler
|
|
|
|
| 4 |
from Brain.src.model.requests.request_model import (
|
| 5 |
Document,
|
|
|
|
| 6 |
)
|
| 7 |
from Brain.src.service.train_service import TrainService
|
| 8 |
|
|
@@ -13,15 +15,16 @@ def construct_blueprint_train_api() -> APIRouter:
|
|
| 13 |
# Assembler
|
| 14 |
assembler = Assembler()
|
| 15 |
|
| 16 |
-
# Services
|
| 17 |
-
train_service = TrainService()
|
| 18 |
-
|
| 19 |
"""@generator.response(
|
| 20 |
status_code=200, schema={"message": "message", "result": "test_result"}
|
| 21 |
)"""
|
| 22 |
|
| 23 |
@router.get("")
|
| 24 |
-
def read_all_documents():
|
|
|
|
|
|
|
|
|
|
|
|
|
| 25 |
try:
|
| 26 |
result = train_service.read_all_documents()
|
| 27 |
except Exception as e:
|
|
@@ -32,7 +35,11 @@ def construct_blueprint_train_api() -> APIRouter:
|
|
| 32 |
"page_content":"page_content"}} )"""
|
| 33 |
|
| 34 |
@router.get("/all")
|
| 35 |
-
def train_all_documents():
|
|
|
|
|
|
|
|
|
|
|
|
|
| 36 |
try:
|
| 37 |
result = train_service.train_all_documents()
|
| 38 |
except Exception as e:
|
|
@@ -43,7 +50,11 @@ def construct_blueprint_train_api() -> APIRouter:
|
|
| 43 |
"page_content":"page_content"}} )"""
|
| 44 |
|
| 45 |
@router.get("/{document_id}")
|
| 46 |
-
def read_one_document(document_id: str):
|
|
|
|
|
|
|
|
|
|
|
|
|
| 47 |
if document_id != "all":
|
| 48 |
try:
|
| 49 |
result = train_service.read_one_document(document_id)
|
|
@@ -63,6 +74,10 @@ def construct_blueprint_train_api() -> APIRouter:
|
|
| 63 |
|
| 64 |
@router.post("")
|
| 65 |
def create_document_train(data: Document):
|
|
|
|
|
|
|
|
|
|
|
|
|
| 66 |
try:
|
| 67 |
result = train_service.create_one_document(data.page_content)
|
| 68 |
except Exception as e:
|
|
@@ -84,6 +99,10 @@ def construct_blueprint_train_api() -> APIRouter:
|
|
| 84 |
|
| 85 |
@router.put("")
|
| 86 |
def update_one_document(data: Document):
|
|
|
|
|
|
|
|
|
|
|
|
|
| 87 |
try:
|
| 88 |
result = train_service.update_one_document(
|
| 89 |
data.document_id, data.page_content
|
|
@@ -104,7 +123,11 @@ def construct_blueprint_train_api() -> APIRouter:
|
|
| 104 |
@generator.response( status_code=200, schema={"message": "message", "result": {"document_id": "document_id"}} )"""
|
| 105 |
|
| 106 |
@router.delete("/{document_id}")
|
| 107 |
-
def delete_one_document(document_id: str):
|
|
|
|
|
|
|
|
|
|
|
|
|
| 108 |
try:
|
| 109 |
result = train_service.delete_one_document(document_id)
|
| 110 |
except Exception as e:
|
|
|
|
| 1 |
from fastapi import APIRouter
|
| 2 |
|
| 3 |
from Brain.src.common.assembler import Assembler
|
| 4 |
+
from Brain.src.firebase.firebase import firebase_admin_with_setting
|
| 5 |
from Brain.src.model.requests.request_model import (
|
| 6 |
Document,
|
| 7 |
+
BasicReq,
|
| 8 |
)
|
| 9 |
from Brain.src.service.train_service import TrainService
|
| 10 |
|
|
|
|
| 15 |
# Assembler
|
| 16 |
assembler = Assembler()
|
| 17 |
|
|
|
|
|
|
|
|
|
|
| 18 |
"""@generator.response(
|
| 19 |
status_code=200, schema={"message": "message", "result": "test_result"}
|
| 20 |
)"""
|
| 21 |
|
| 22 |
@router.get("")
|
| 23 |
+
def read_all_documents(data: BasicReq):
|
| 24 |
+
# parsing params
|
| 25 |
+
setting, firebase_app = firebase_admin_with_setting(data)
|
| 26 |
+
# Services
|
| 27 |
+
train_service = TrainService(firebase_app=firebase_app)
|
| 28 |
try:
|
| 29 |
result = train_service.read_all_documents()
|
| 30 |
except Exception as e:
|
|
|
|
| 35 |
"page_content":"page_content"}} )"""
|
| 36 |
|
| 37 |
@router.get("/all")
|
| 38 |
+
def train_all_documents(data: BasicReq):
|
| 39 |
+
# parsing params
|
| 40 |
+
setting, firebase_app = firebase_admin_with_setting(data)
|
| 41 |
+
# Services
|
| 42 |
+
train_service = TrainService(firebase_app=firebase_app)
|
| 43 |
try:
|
| 44 |
result = train_service.train_all_documents()
|
| 45 |
except Exception as e:
|
|
|
|
| 50 |
"page_content":"page_content"}} )"""
|
| 51 |
|
| 52 |
@router.get("/{document_id}")
|
| 53 |
+
def read_one_document(document_id: str, data: BasicReq):
|
| 54 |
+
# parsing params
|
| 55 |
+
setting, firebase_app = firebase_admin_with_setting(data)
|
| 56 |
+
# Services
|
| 57 |
+
train_service = TrainService(firebase_app=firebase_app)
|
| 58 |
if document_id != "all":
|
| 59 |
try:
|
| 60 |
result = train_service.read_one_document(document_id)
|
|
|
|
| 74 |
|
| 75 |
@router.post("")
|
| 76 |
def create_document_train(data: Document):
|
| 77 |
+
# parsing params
|
| 78 |
+
setting, firebase_app = firebase_admin_with_setting(data)
|
| 79 |
+
# Services
|
| 80 |
+
train_service = TrainService(firebase_app=firebase_app)
|
| 81 |
try:
|
| 82 |
result = train_service.create_one_document(data.page_content)
|
| 83 |
except Exception as e:
|
|
|
|
| 99 |
|
| 100 |
@router.put("")
|
| 101 |
def update_one_document(data: Document):
|
| 102 |
+
# parsing params
|
| 103 |
+
setting, firebase_app = firebase_admin_with_setting(data)
|
| 104 |
+
# Services
|
| 105 |
+
train_service = TrainService(firebase_app=firebase_app)
|
| 106 |
try:
|
| 107 |
result = train_service.update_one_document(
|
| 108 |
data.document_id, data.page_content
|
|
|
|
| 123 |
@generator.response( status_code=200, schema={"message": "message", "result": {"document_id": "document_id"}} )"""
|
| 124 |
|
| 125 |
@router.delete("/{document_id}")
|
| 126 |
+
def delete_one_document(document_id: str, data: BasicReq):
|
| 127 |
+
# parsing params
|
| 128 |
+
setting, firebase_app = firebase_admin_with_setting(data)
|
| 129 |
+
# Services
|
| 130 |
+
train_service = TrainService(firebase_app=firebase_app)
|
| 131 |
try:
|
| 132 |
result = train_service.delete_one_document(document_id)
|
| 133 |
except Exception as e:
|
Brain/src/service/feedback_service.py
CHANGED
|
@@ -1,4 +1,5 @@
|
|
| 1 |
from os import remove
|
|
|
|
| 2 |
|
| 3 |
from firebase_admin import firestore
|
| 4 |
|
|
@@ -9,18 +10,23 @@ from Brain.src.model.feedback_model import FeedbackModel
|
|
| 9 |
|
| 10 |
|
| 11 |
class FeedbackService:
|
| 12 |
-
|
|
|
|
|
|
|
|
|
|
| 13 |
self.db = firestore.client()
|
| 14 |
self.feedbacks_ref = self.db.collection("feedbacks")
|
| 15 |
|
| 16 |
"""add a new feedback"""
|
| 17 |
|
| 18 |
def add(self, feedback: FeedbackModel):
|
|
|
|
| 19 |
return self.feedbacks_ref.document().set(feedback.to_json())
|
| 20 |
|
| 21 |
"""get list of feedback"""
|
| 22 |
|
| 23 |
def get(self, search, rating):
|
|
|
|
| 24 |
if rating == 0:
|
| 25 |
query = self.feedbacks_ref.where("rating", "!=", rating)
|
| 26 |
else:
|
|
|
|
| 1 |
from os import remove
|
| 2 |
+
from typing import Any
|
| 3 |
|
| 4 |
from firebase_admin import firestore
|
| 5 |
|
|
|
|
| 10 |
|
| 11 |
|
| 12 |
class FeedbackService:
|
| 13 |
+
db: Any
|
| 14 |
+
feedbacks_ref: Any
|
| 15 |
+
|
| 16 |
+
def init_firestore(self):
    """Create the Firestore client and the feedbacks collection handle.

    Guarded so the repeated calls made by every public method do not
    rebuild the client and collection reference on each request.
    """
    # NOTE(review): this uses the default firebase app, while TrainService
    # binds an explicit per-request app -- confirm this is intentional.
    if getattr(self, "db", None) is None:
        self.db = firestore.client()
        self.feedbacks_ref = self.db.collection("feedbacks")
|
| 19 |
|
| 20 |
"""add a new feedback"""
|
| 21 |
|
| 22 |
def add(self, feedback: FeedbackModel):
    """Persist *feedback* as a new auto-ID document in the feedbacks collection."""
    self.init_firestore()
    new_doc_ref = self.feedbacks_ref.document()
    return new_doc_ref.set(feedback.to_json())
|
| 25 |
|
| 26 |
"""get list of feedback"""
|
| 27 |
|
| 28 |
def get(self, search, rating):
|
| 29 |
+
self.init_firestore()
|
| 30 |
if rating == 0:
|
| 31 |
query = self.feedbacks_ref.where("rating", "!=", rating)
|
| 32 |
else:
|
Brain/src/service/train_service.py
CHANGED
|
@@ -1,6 +1,8 @@
|
|
| 1 |
"""service to manage trains"""
|
| 2 |
from typing import List, Any
|
| 3 |
|
|
|
|
|
|
|
| 4 |
from Brain.src.rising_plugin.csv_embed import get_embed
|
| 5 |
from Brain.src.rising_plugin.pinecone_engine import (
|
| 6 |
get_pinecone_index_namespace,
|
|
@@ -28,13 +30,20 @@ class TrainService:
|
|
| 28 |
key: document_id
|
| 29 |
values: {page_content}"""
|
| 30 |
|
| 31 |
-
|
| 32 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 33 |
self.documents_ref = self.db.collection("documents")
|
| 34 |
|
| 35 |
"""read all documents from firestore"""
|
| 36 |
|
| 37 |
def read_all_documents(self):
|
|
|
|
| 38 |
query = self.documents_ref.order_by("timestamp")
|
| 39 |
docs = query.stream()
|
| 40 |
result = []
|
|
@@ -48,6 +57,7 @@ class TrainService:
|
|
| 48 |
"""read one document from firestore"""
|
| 49 |
|
| 50 |
def read_one_document(self, document_id: str):
|
|
|
|
| 51 |
doc = self.documents_ref.document(document_id).get()
|
| 52 |
if doc.exists:
|
| 53 |
return {
|
|
@@ -60,6 +70,7 @@ class TrainService:
|
|
| 60 |
"""create a new document and train it"""
|
| 61 |
|
| 62 |
def create_one_document(self, page_content: str):
|
|
|
|
| 63 |
# Auto-generate document ID
|
| 64 |
auto_generated_doc_ref = self.documents_ref.document()
|
| 65 |
auto_generated_doc_ref.set(to_json(page_content))
|
|
@@ -70,6 +81,7 @@ class TrainService:
|
|
| 70 |
"""update a document by using id and train it"""
|
| 71 |
|
| 72 |
def update_one_document(self, document_id: str, page_content: str):
|
|
|
|
| 73 |
self.documents_ref.document(document_id).update(to_json(page_content))
|
| 74 |
self.train_one_document(document_id, page_content)
|
| 75 |
return {"document_id": document_id, "page_content": page_content}
|
|
@@ -77,11 +89,13 @@ class TrainService:
|
|
| 77 |
"""delete a document by using document_id"""
|
| 78 |
|
| 79 |
def delete_one_document(self, document_id: str):
|
|
|
|
| 80 |
self.documents_ref.document(document_id).delete()
|
| 81 |
self.delete_one_pinecone(document_id)
|
| 82 |
return {"document_id": document_id}
|
| 83 |
|
| 84 |
def train_all_documents(self) -> str:
|
|
|
|
| 85 |
self.delete_all()
|
| 86 |
documents = self.read_all_documents()
|
| 87 |
result = list()
|
|
@@ -98,6 +112,7 @@ class TrainService:
|
|
| 98 |
return "trained all documents successfully"
|
| 99 |
|
| 100 |
def train_one_document(self, document_id: str, page_content: str) -> None:
|
|
|
|
| 101 |
pinecone_namespace = self.get_pinecone_index_namespace()
|
| 102 |
result = list()
|
| 103 |
query_result = get_embed(page_content)
|
|
@@ -109,13 +124,17 @@ class TrainService:
|
|
| 109 |
add_pinecone(namespace=pinecone_namespace, key=key, value=vectoring_values)
|
| 110 |
|
| 111 |
def delete_all(self) -> Any:
|
|
|
|
| 112 |
return delete_all_pinecone(self.get_pinecone_index_namespace())
|
| 113 |
|
| 114 |
def delete_one_pinecone(self, document_id: str) -> Any:
|
|
|
|
| 115 |
return delete_pinecone(self.get_pinecone_index_namespace(), document_id)
|
| 116 |
|
| 117 |
def get_pinecone_index_namespace(self) -> str:
|
|
|
|
| 118 |
return get_pinecone_index_namespace(f"trains")
|
| 119 |
|
| 120 |
def get_pinecone_index_train_namespace(self) -> str:
|
|
|
|
| 121 |
return get_pinecone_index_namespace(f"trains")
|
|
|
|
| 1 |
"""service to manage trains"""
|
| 2 |
from typing import List, Any
|
| 3 |
|
| 4 |
+
import firebase_admin
|
| 5 |
+
|
| 6 |
from Brain.src.rising_plugin.csv_embed import get_embed
|
| 7 |
from Brain.src.rising_plugin.pinecone_engine import (
|
| 8 |
get_pinecone_index_namespace,
|
|
|
|
| 30 |
key: document_id
|
| 31 |
values: {page_content}"""
|
| 32 |
|
| 33 |
+
db: Any
|
| 34 |
+
documents_ref: Any
|
| 35 |
+
|
| 36 |
+
def __init__(self, firebase_app: firebase_admin.App):
    """Bind this service to the per-request firebase app used for Firestore access."""
    self.firebase_app = firebase_app
|
| 38 |
+
|
| 39 |
+
def init_firestore(self):
    """Create the Firestore client for the bound app and the documents handle.

    Guarded so the repeated calls made by every public method do not
    rebuild the client and collection reference on each request.
    """
    if getattr(self, "db", None) is None:
        self.db = firestore.client(app=self.firebase_app)
        self.documents_ref = self.db.collection("documents")
|
| 42 |
|
| 43 |
"""read all documents from firestore"""
|
| 44 |
|
| 45 |
def read_all_documents(self):
|
| 46 |
+
self.init_firestore()
|
| 47 |
query = self.documents_ref.order_by("timestamp")
|
| 48 |
docs = query.stream()
|
| 49 |
result = []
|
|
|
|
| 57 |
"""read one document from firestore"""
|
| 58 |
|
| 59 |
def read_one_document(self, document_id: str):
|
| 60 |
+
self.init_firestore()
|
| 61 |
doc = self.documents_ref.document(document_id).get()
|
| 62 |
if doc.exists:
|
| 63 |
return {
|
|
|
|
| 70 |
"""create a new document and train it"""
|
| 71 |
|
| 72 |
def create_one_document(self, page_content: str):
|
| 73 |
+
self.init_firestore()
|
| 74 |
# Auto-generate document ID
|
| 75 |
auto_generated_doc_ref = self.documents_ref.document()
|
| 76 |
auto_generated_doc_ref.set(to_json(page_content))
|
|
|
|
| 81 |
"""update a document by using id and train it"""
|
| 82 |
|
| 83 |
def update_one_document(self, document_id: str, page_content: str):
    """Overwrite a document's content, retrain its embedding, and echo the result."""
    self.init_firestore()
    doc_ref = self.documents_ref.document(document_id)
    doc_ref.update(to_json(page_content))
    self.train_one_document(document_id, page_content)
    return {"document_id": document_id, "page_content": page_content}
|
|
|
|
| 89 |
"""delete a document by using document_id"""
|
| 90 |
|
| 91 |
def delete_one_document(self, document_id: str):
    """Remove one document from Firestore and drop its vectors from pinecone."""
    self.init_firestore()
    doc_ref = self.documents_ref.document(document_id)
    doc_ref.delete()
    self.delete_one_pinecone(document_id)
    return {"document_id": document_id}
|
| 96 |
|
| 97 |
def train_all_documents(self) -> str:
|
| 98 |
+
self.init_firestore()
|
| 99 |
self.delete_all()
|
| 100 |
documents = self.read_all_documents()
|
| 101 |
result = list()
|
|
|
|
| 112 |
return "trained all documents successfully"
|
| 113 |
|
| 114 |
def train_one_document(self, document_id: str, page_content: str) -> None:
|
| 115 |
+
self.init_firestore()
|
| 116 |
pinecone_namespace = self.get_pinecone_index_namespace()
|
| 117 |
result = list()
|
| 118 |
query_result = get_embed(page_content)
|
|
|
|
| 124 |
add_pinecone(namespace=pinecone_namespace, key=key, value=vectoring_values)
|
| 125 |
|
| 126 |
def delete_all(self) -> Any:
    """Delete every vector in the trains pinecone namespace."""
    # Pinecone-only operation: the previous per-call Firestore init here
    # created a client that was never used, so it has been dropped.
    return delete_all_pinecone(self.get_pinecone_index_namespace())
|
| 129 |
|
| 130 |
def delete_one_pinecone(self, document_id: str) -> Any:
    """Delete the pinecone vectors stored under *document_id*."""
    # Pinecone-only operation: the previous per-call Firestore init here
    # created a client that was never used, so it has been dropped.
    return delete_pinecone(self.get_pinecone_index_namespace(), document_id)
|
| 133 |
|
| 134 |
def get_pinecone_index_namespace(self) -> str:
    """Return the pinecone namespace used for trained documents."""
    # Pure string helper: no Firestore client is needed here, and the
    # f-prefix on a placeholder-free literal was redundant.
    return get_pinecone_index_namespace("trains")
|
| 137 |
|
| 138 |
def get_pinecone_index_train_namespace(self) -> str:
    """Return the pinecone namespace for training (same as the documents namespace)."""
    # Was a byte-for-byte duplicate of get_pinecone_index_namespace,
    # including a needless Firestore init; delegate instead so the
    # namespace is defined in exactly one place.
    return self.get_pinecone_index_namespace()
|
app.py
CHANGED
|
@@ -5,8 +5,6 @@ import uvicorn
|
|
| 5 |
from Brain.src.router.browser_router import construct_blueprint_browser_api
|
| 6 |
from Brain.src.router.train_router import construct_blueprint_train_api
|
| 7 |
|
| 8 |
-
initialize_app()
|
| 9 |
-
|
| 10 |
from Brain.src.router.api import construct_blueprint_api
|
| 11 |
|
| 12 |
app = FastAPI()
|
|
|
|
| 5 |
from Brain.src.router.browser_router import construct_blueprint_browser_api
|
| 6 |
from Brain.src.router.train_router import construct_blueprint_train_api
|
| 7 |
|
|
|
|
|
|
|
| 8 |
from Brain.src.router.api import construct_blueprint_api
|
| 9 |
|
| 10 |
app = FastAPI()
|