import os
import shutil
import threading  # used to run the keep-alive loop without blocking the server
import time
from datetime import datetime

import requests
from fastapi import FastAPI, File, UploadFile
from fastapi.responses import JSONResponse

# The imports below are only used by the commented-out prediction endpoints;
# re-enable them together with that code.
# import json
# import numpy as np
# import tensorflow as tf
# from huggingface_hub import InferenceClient
# from langchain_community.agent_toolkits import GmailToolkit
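# To run the app locally, a typical invocation is shown below (the module name
# "app" and port 7860, the Hugging Face Spaces default, are assumptions; adjust
# to the actual file name and deployment):
#   uvicorn app:app --host 0.0.0.0 --port 7860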
# Initialize FastAPI app
app = FastAPI()


@app.get("/")
def read_root():
    # toolkit = GmailToolkit()
    return {"message": "Connection"}


@app.get("/ping")
def ping():
    return {"status": "alive"}


UPLOAD_FOLDER = "uploaded_pdfs"
os.makedirs(UPLOAD_FOLDER, exist_ok=True)
@app.post("/upload-pdf")
async def upload_pdf(file: UploadFile = File(...)):
try:
file_location = os.path.join(UPLOAD_FOLDER, file.filename)
with open(file_location, "wb") as buffer:
shutil.copyfileobj(file.file, buffer)
return JSONResponse(content={"message": f"Uploaded {file.filename} successfully"}, status_code=200)
except Exception as e:
return JSONResponse(content={"error": str(e)}, status_code=500)
@app.get("/list-files")
def list_uploaded_files():
files = os.listdir(UPLOAD_FOLDER)
return {"files": files}
def keep_alive(space_url="https://1mr-apigmail.hf.space/ping", interval_hours=5):
    """Ping the Space's /ping endpoint on a fixed interval so the free-tier
    host does not put the app to sleep for inactivity."""
    while True:
        try:
            print(f"🔄 Pinging {space_url} at {datetime.now()}")
            response = requests.get(space_url)
            if response.status_code == 200:
                print("✅ Ping succeeded")
            else:
                print(f"⚠️ Ping returned status {response.status_code}")
        except Exception as e:
            print(f"❌ Ping failed: {e}")
        time.sleep(interval_hours * 3600)
# keep_alive() loops forever, so calling it directly at import time would
# block the server before it could start handling requests. Run it in a
# daemon thread instead, so it exits together with the main process.
threading.Thread(target=keep_alive, daemon=True).start()
# # API endpoint for prediction
# @app.post("/predict")
# async def predict_image(file: UploadFile = File(...)):
#     try:
#         # Save the uploaded file
#         file_location = f"./temp_{file.filename}"
#         with open(file_location, "wb") as f:
#             shutil.copyfileobj(file.file, f)
#         # Predict the label
#         prediction = predict_label(model, file_location, class_labels)
#         # Remove the temporary file
#         os.remove(file_location)
#         return {"predicted_label": prediction}
#     except Exception as e:
#         return JSONResponse(
#             status_code=500,
#             content={"error": f"An error occurred: {str(e)}"}
#         )
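# Note: predict_label, model, and class_labels are referenced above but not
# defined in this file; they presumably lived alongside the TensorFlow model
# loading code before these endpoints were commented out, and must be restored
# before re-enabling /predict or /predictNUT.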
# @app.post("/predictNUT")
# async def predict_image_and_nutrition(file: UploadFile = File(...)):
# try:
# # Save the uploaded file
# file_location = f"./temp_{file.filename}"
# with open(file_location, "wb") as f:
# shutil.copyfileobj(file.file, f)
# # Predict the label using the same prediction logic
# prediction = predict_label(model, file_location, class_labels)
# # Remove the temporary file
# os.remove(file_location)
# # Define the repository ID and your token
# #repo_id = "google/gemma-2-9b-it"
# repo_id = "Qwen/Qwen2.5-72B-Instruct"
# # repo_id = "microsoft/Phi-3-mini-4k-instruct"
# #repo_id = "mistralai/Mixtral-8x7B-Instruct-v0.1"
# api_token = "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21"
# # Initialize the InferenceClient with your token
# llm_client = InferenceClient(
# model=repo_id,
# token=api_token[:-2], # Pass the token here
# timeout=120,
# )
# # Function to call the LLM
# def call_llm(inference_client: InferenceClient, prompt: str):
# response = inference_client.post(
# json={
# "inputs": prompt,
# "parameters": {"max_new_tokens": 500},
# "task": "text-generation",
# },
# )
# return json.loads(response.decode())[0]["generated_text"]
# # Use the prediction to generate nutrition information
# # prompt = f"Nutrition information (Calories, Protein, Carbohydrates, Dietary Fiber, Sugars, Fat, Sodium, Potassium, Vitamin C, Vitamin B6, Folate, Niacin, Pantothenic acid) for {prediction} in formatted list"
# # # prompt = f"Provide all the nutrition information for {prediction}, including Calories, Protein, Carbohydrates, Dietary Fiber, Sugars, Fat, Sodium, Potassium, Vitamin C, Vitamin B6, Folate, Niacin, and Pantothenic acid. Please present the information in a clear, formatted list only, without additional explanations."
# # response = call_llm(llm_client, prompt)
# # return {"predicted_label": prediction, "nutrition_info": response}
# # nutrition_prompt = f"Provide the nutrition information (Calories, Protein, Carbohydrates, Dietary Fiber, Sugars, Fat, Sodium, Potassium, Vitamin C, Vitamin B6, Folate, Niacin, Pantothenic acid) for {prediction} per 100 grams in a formatted list only."
# nutrition_prompt = f"Provide the nutrition information (Calories, Protein, Carbohydrates, Dietary Fiber, Sugars, Fat, Sodium, Potassium, Vitamin C, Vitamin B6) for {prediction} per 100 grams, Output the information as a concise, formatted list without repetition."
# nutrition_info = call_llm(llm_client, nutrition_prompt)
# # # Second prompt: Health benefits and tips
# health_benefits_prompt = f"Provide the health benefits and considerations for {prediction}. Additionally, include practical tips for making {prediction} healthier. Keep the response focused on these two aspects only."
# # health_benefits_prompt = f"Provide detailed information about {prediction}, including its origin, common uses, cultural significance, and any interesting facts. Keep the response informative and well-structured."
# Information = call_llm(llm_client, health_benefits_prompt)
# recipes_prompt=f"Tell me about the two most famous recipes for {prediction}. Include the ingredients only."
# recipes_info=call_llm(llm_client, recipes_prompt)
# return {
# "Predicted_label": prediction,
# "Nutrition_info": nutrition_info,
# "Information": Information,
# "Recipes":recipes_info
# }
# except Exception as e:
# return JSONResponse(
# status_code=500,
# content={"error": f"An error occurred: {str(e)}"}
# )
# #nutrition_prompt = f"Provide the nutrition information (Calories, Protein, Carbohydrates, Dietary Fiber, Sugars, Fat, Sodium, Potassium, Vitamin C, Vitamin B6) for {prediction} in a formatted list only."
# # nutrition_info = call_llm(llm_client, nutrition_prompt)
# # # Second prompt: Health benefits and tips
# # health_benefits_prompt = f"Provide the health benefits and considerations for {prediction} and give tips for making it healthier."
# # health_benefits_and_tips = call_llm(llm_client, health_benefits_prompt)
# # return {
# # "predicted_label": prediction,
# # "nutrition_info": nutrition_info,
# # "health_benefits_and_tips": health_benefits_and_tips
# # } |