from fastapi import FastAPI, File, UploadFile
from fastapi.responses import JSONResponse
import tensorflow as tf
import numpy as np
import shutil
import os
from huggingface_hub import InferenceClient
import json
from langchain_community.agent_toolkits import GmailToolkit
import time
import requests
from datetime import datetime

# Initialize FastAPI app
app = FastAPI()


@app.get("/")
def read_root():
    """Landing endpoint; confirms the service is reachable."""
    # toolkit = GmailToolkit()
    return {"message": "Connection"}
    # NOTE(review): the original had an unreachable `keep_alive()` call here,
    # after the return. It was dead code (and would block forever if ever
    # reached, since keep_alive loops indefinitely) — removed.


@app.get("/ping")
def ping():
    """Liveness probe; keep_alive() polls this to stop the Space idling."""
    return {"status": "alive"}


# Directory where uploaded PDFs are persisted; created at import time.
UPLOAD_FOLDER = "uploaded_pdfs"
os.makedirs(UPLOAD_FOLDER, exist_ok=True)


@app.post("/upload-pdf")
async def upload_pdf(file: UploadFile = File(...)):
    """Save an uploaded file into UPLOAD_FOLDER.

    Returns a 200 JSON message on success, or a 500 JSON error payload.
    The client-supplied filename is reduced to its basename so a crafted
    name like "../../etc/passwd" cannot escape UPLOAD_FOLDER.
    """
    try:
        # Security fix: never trust the raw client filename in a path join.
        safe_name = os.path.basename(file.filename)
        file_location = os.path.join(UPLOAD_FOLDER, safe_name)
        with open(file_location, "wb") as buffer:
            shutil.copyfileobj(file.file, buffer)
        return JSONResponse(
            content={"message": f"Uploaded {file.filename} successfully"},
            status_code=200,
        )
    except Exception as e:
        return JSONResponse(content={"error": str(e)}, status_code=500)


@app.get("/list-files")
def list_uploaded_files():
    """Return the names of all files currently stored in UPLOAD_FOLDER."""
    files = os.listdir(UPLOAD_FOLDER)
    return {"files": files}


def keep_alive(space_url="https://1mr-apigmail.hf.space/ping", interval_hours=5):
    """Ping `space_url` every `interval_hours` hours, forever.

    Intended to keep a Hugging Face Space from going idle. Blocks the
    calling thread indefinitely, so it must not run in a request handler.

    Fixes vs. original: the three outcome branches printed empty strings
    (useless logging) — real messages are emitted now; and the request
    carries a timeout so a hung server cannot stall the loop permanently.
    """
    while True:
        try:
            print(f"🔄 Pinging {space_url} at {datetime.now()}")
            response = requests.get(space_url, timeout=30)
            if response.status_code == 200:
                print(f"✅ Ping OK (status {response.status_code})")
            else:
                print(f"⚠️ Ping returned status {response.status_code}")
        except Exception as e:
            print(f"❌ Ping failed: {e}")
        time.sleep(interval_hours * 3600)


# Call the function
# keep_alive()

# # API endpoint for prediction
# @app.post("/predict")
# async def predict_image(file: UploadFile = File(...)):
#     try:
#         # Save the uploaded file
#         file_location = f"./temp_{file.filename}"
#         with open(file_location, "wb") as f:
#             shutil.copyfileobj(file.file, f)
#         # Predict the label
#         prediction = predict_label(model, file_location, class_labels)
#         # Remove the temporary file
#         os.remove(file_location)
#         return {"predicted_label": prediction}
#     except Exception as e:
#         return JSONResponse(
#             status_code=500,
#             content={"error": f"An error occurred: {str(e)}"}
#         )

# @app.post("/predictNUT")
# async def predict_image_and_nutrition(file: UploadFile = File(...)):
#     try:
#         # Save the uploaded file
#         file_location = f"./temp_{file.filename}"
#         with open(file_location, "wb") as f:
#             shutil.copyfileobj(file.file, f)
#         # Predict the label using the same prediction logic
#         prediction = predict_label(model, file_location, class_labels)
#         # Remove the temporary file
#         os.remove(file_location)
#         # Define the repository ID and your token
#         # repo_id = "google/gemma-2-9b-it"
#         repo_id = "Qwen/Qwen2.5-72B-Instruct"
#         # repo_id = "microsoft/Phi-3-mini-4k-instruct"
#         # repo_id = "mistralai/Mixtral-8x7B-Instruct-v0.1"
#         # SECURITY: a live Hugging Face token was committed here in plain
#         # text. It has been redacted; the exposed key must be revoked and
#         # the replacement loaded from an environment variable, never from
#         # source (e.g. os.environ["HF_TOKEN"]).
#         api_token = "hf_REDACTED"
#         # Initialize the InferenceClient with your token
#         llm_client = InferenceClient(
#             model=repo_id,
#             token=api_token[:-2],  # Pass the token here
#             timeout=120,
#         )
#         # Function to call the LLM
#         def call_llm(inference_client: InferenceClient, prompt: str):
#             response = inference_client.post(
#                 json={
#                     "inputs": prompt,
#                     "parameters": {"max_new_tokens": 500},
#                     "task": "text-generation",
#                 },
#             )
#             return json.loads(response.decode())[0]["generated_text"]
#         # Use the prediction to generate nutrition information
#         # prompt = f"Nutrition information (Calories, Protein, Carbohydrates, Dietary Fiber, Sugars, Fat, Sodium, Potassium, Vitamin C, Vitamin B6, Folate, Niacin, Pantothenic acid) for {prediction} in formatted list"
#         # prompt = f"Provide all the nutrition information for {prediction}, including Calories, Protein, Carbohydrates, Dietary Fiber, Sugars, Fat, Sodium, Potassium, Vitamin C, Vitamin B6, Folate, Niacin, and Pantothenic acid. Please present the information in a clear, formatted list only, without additional explanations."
# # response = call_llm(llm_client, prompt) # # return {"predicted_label": prediction, "nutrition_info": response} # # nutrition_prompt = f"Provide the nutrition information (Calories, Protein, Carbohydrates, Dietary Fiber, Sugars, Fat, Sodium, Potassium, Vitamin C, Vitamin B6, Folate, Niacin, Pantothenic acid) for {prediction} per 100 grams in a formatted list only." # nutrition_prompt = f"Provide the nutrition information (Calories, Protein, Carbohydrates, Dietary Fiber, Sugars, Fat, Sodium, Potassium, Vitamin C, Vitamin B6) for {prediction} per 100 grams, Output the information as a concise, formatted list without repetition." # nutrition_info = call_llm(llm_client, nutrition_prompt) # # # Second prompt: Health benefits and tips # health_benefits_prompt = f"Provide the health benefits and considerations for {prediction}. Additionally, include practical tips for making {prediction} healthier. Keep the response focused on these two aspects only." # # health_benefits_prompt = f"Provide detailed information about {prediction}, including its origin, common uses, cultural significance, and any interesting facts. Keep the response informative and well-structured." # Information = call_llm(llm_client, health_benefits_prompt) # recipes_prompt=f"Tell me about the two most famous recipes for {prediction}. Include the ingredients only." # recipes_info=call_llm(llm_client, recipes_prompt) # return { # "Predicted_label": prediction, # "Nutrition_info": nutrition_info, # "Information": Information, # "Recipes":recipes_info # } # except Exception as e: # return JSONResponse( # status_code=500, # content={"error": f"An error occurred: {str(e)}"} # ) # #nutrition_prompt = f"Provide the nutrition information (Calories, Protein, Carbohydrates, Dietary Fiber, Sugars, Fat, Sodium, Potassium, Vitamin C, Vitamin B6) for {prediction} in a formatted list only." 
# # nutrition_info = call_llm(llm_client, nutrition_prompt) # # # Second prompt: Health benefits and tips # # health_benefits_prompt = f"Provide the health benefits and considerations for {prediction} and give tips for making it healthier." # # health_benefits_and_tips = call_llm(llm_client, health_benefits_prompt) # # return { # # "predicted_label": prediction, # # "nutrition_info": nutrition_info, # # "health_benefits_and_tips": health_benefits_and_tips # # }