# qbapi/main.py — Flask API for the Qx app (Hugging Face Space).
# Exposes /predict (pandasai SmartDatalake Q&A over Firestore data)
# and /mrec (LangChain marketing recommendations).
from langchain_google_genai import ChatGoogleGenerativeAI
import pandas as pd
import os
import io
from flask import Flask, request, jsonify
from flask_cors import CORS, cross_origin
import pandas as pd
import firebase_admin
from firebase_admin import credentials, firestore, auth
import requests
import pandas as pd
from datetime import datetime
import os
from pandasai.llm import GoogleGemini
from pandasai import SmartDataframe, SmartDatalake
from pandasai.responses.response_parser import ResponseParser
import matplotlib.pyplot as plt
from wordcloud import WordCloud
import random
from langchain.prompts import PromptTemplate
from langchain.chains import LLMChain
from dotenv import load_dotenv
import json
from dotenv import load_dotenv
load_dotenv()
# Flask application instance; CORS is enabled globally so browser clients
# on other origins can call the JSON endpoints.
app = Flask(__name__)
cors = CORS(app)
class FlaskResponse(ResponseParser):
    """pandasai response parser that renders results for Flask JSON replies.

    DataFrames become HTML tables, plots become the saved image path, and
    everything else is stringified.
    """

    def __init__(self, context) -> None:
        super().__init__(context)

    def format_dataframe(self, result):
        # result['value'] is a pandas DataFrame; return it as an HTML table.
        return result['value'].to_html()

    def format_plot(self, result):
        # pandasai saves the chart to disk and passes the image path here.
        # The original try/except ValueError was dead code: a dict subscript
        # raises KeyError (or TypeError), never ValueError, so the handler
        # could not fire. Coerce to str once instead, matching format_other.
        img_path = str(result['value'])
        print("response_class_path:", img_path)
        return img_path

    def format_other(self, result):
        # Fallback for scalar/text answers.
        return str(result['value'])
gemini_api_key = os.environ['Gemini']
@app.route("/", methods=["GET"])
def home():
    """Simple health-check endpoint confirming the service is up."""
    greeting = "Hello Qx!"
    return greeting
# pandasai LLM used by the /predict SmartDatalake.
llm = GoogleGemini(api_key=gemini_api_key)
# LangChain chat model used by the /mrec marketing-recommendation chain.
llm2 = ChatGoogleGenerativeAI(model='gemini-1.5-flash-001', temperature=0.1)
# Initialize Firebase app exactly once (guard protects against
# double-initialization on module reload, which firebase_admin rejects).
if not firebase_admin._apps:
    cred = credentials.Certificate("quant-app-99d09-firebase-adminsdk-6prb1-37f34e1c91.json")
    firebase_admin.initialize_app(cred)
db = firestore.client()
def _collection_to_df(user_id, collection_name):
    """Fetch every document of a user's Firestore subcollection as a DataFrame.

    Returns an empty DataFrame when the subcollection has no documents.
    """
    ref = db.collection("system_users").document(user_id).collection(collection_name)
    return pd.DataFrame([doc.to_dict() for doc in ref.stream()])


@app.route("/predict", methods=["POST"])
@cross_origin()
def bot():
    """Answer a natural-language question over the user's business data.

    Expects JSON body with 'user_id' and 'user_question'. Loads the user's
    inventory, transactions, and tasks from Firestore into DataFrames and
    queries them through a pandasai SmartDatalake. Returns the answer as a
    JSON string.
    """
    user_id = request.json.get("user_id")
    user_question = request.json.get("user_question")
    # NOTE: a per-request load_dotenv() was removed — it already runs at
    # import time and does not override existing env vars, so it was a no-op.
    inventory_df = _collection_to_df(user_id, 'inventory')
    transactions_df = _collection_to_df(user_id, 'transactions')
    tasks_df = _collection_to_df(user_id, 'tasks')
    lake = SmartDatalake(
        [inventory_df, transactions_df, tasks_df],
        config={
            "llm": llm,
            "response_parser": FlaskResponse,
            "enable_cache": False,
            "save_logs": False,
        },
    )
    response = lake.chat(user_question)
    print(user_question)
    return jsonify(str(response))
@app.route("/mrec", methods=["POST"])
@cross_origin()
def marketing_rec():
    """Generate brief marketing tips from the user's transaction history.

    Expects JSON body with 'user_id'. Streams the user's transactions from
    Firestore into a DataFrame and feeds it to a LangChain LLM chain; returns
    the model's text as a JSON string.
    """
    user_id = request.json.get("user_id")
    transactions_ref = db.collection("system_users").document(user_id).collection('transactions')
    # Collect all transaction documents into a single DataFrame.
    transactions_df = pd.DataFrame([doc.to_dict() for doc in transactions_ref.stream()])
    # Prompt template receives the whole DataFrame as {data_frame}.
    prompt = PromptTemplate.from_template('You are a business analyst.In the fewest words possible, write a brief analysis and some very brief marketing tips suitable for a small business with this transactions data {data_frame}')
    chain = LLMChain(llm=llm2, prompt=prompt, verbose=True)
    response = chain.invoke(input=transactions_df)
    print(response)
    return jsonify(str(response['text']))
if __name__ == "__main__":
    # debug=True enables the Werkzeug reloader/debugger — intended for
    # development only; port 7860 is the Hugging Face Spaces convention.
    app.run(debug=True,host="0.0.0.0", port=7860)