Spaces:
Sleeping
Sleeping
| import os | |
| from flask import Flask, render_template, request, jsonify | |
| from transformers import pipeline | |
| # from google import genai | |
| # from google.genai import types | |
| import google.generativeai as genai | |
| import random | |
| import markdown # Converts Markdown to HTML | |
| app = Flask(__name__, static_folder='static', template_folder='templates') | |
# Set a writable Hugging Face cache directory in /tmp — on HF Spaces the
# container filesystem outside /tmp is read-only for the app user.
CACHE_DIR = "/tmp/huggingface_cache"
# HF_HOME supersedes the deprecated TRANSFORMERS_CACHE variable.
# BUG FIX: the original immediately overwrote this with
# TRANSFORMERS_CACHE or the literal "~/.cache/huggingface" (tilde never
# expanded by os.environ), defeating the /tmp cache entirely.
os.environ["HF_HOME"] = CACHE_DIR

# Ensure the cache directory exists before any model download happens.
if not os.path.exists(CACHE_DIR):
    try:
        os.makedirs(CACHE_DIR, exist_ok=True)
    except PermissionError:
        print(f"❌ Permission Denied: Cannot create cache directory at {CACHE_DIR}")
        exit(1)
# Set up Google Gemini (requires the SDK: pip install google-generativeai).
GOOGLE_API_KEY = os.getenv("GOOGLE_API_KEY")
# BUG FIX: fail fast BEFORE configuring the SDK. The original called
# genai.configure(api_key=None) first, so the missing-key error only
# surfaced later as a confusing failure at the first request.
if GOOGLE_API_KEY is None:
    raise ValueError("🚨 GOOGLE_API_KEY is missing! Add it in HF → Settings → Secrets")
genai.configure(api_key=GOOGLE_API_KEY)
# Load the Gemini model used by the chat/ask handlers below.
# NOTE(review): the source pinned "gemini-2.0-flash-live", which is a
# Live-API (bidirectional streaming) model and does not serve standard
# generate_content() requests; the non-live flash model does. Confirm
# against the account's available models (see the /models endpoint).
model = genai.GenerativeModel("gemini-2.0-flash")
# NOTE(review): the @app.route decorator was missing from the pasted source;
# without it the view never registers. Path inferred from the template name.
@app.route("/")
def home():
    """Render the site landing page."""
    return render_template("index.html")
# NOTE(review): @app.route was missing in the pasted source; path inferred
# from the template name — confirm against the site's navigation links.
@app.route("/ai")
def ai():
    """Render the AI chat page."""
    return render_template("ai.html")
# NOTE(review): @app.route was missing in the pasted source; path inferred
# from the template name — confirm against the site's navigation links.
@app.route("/about")
def about():
    """Render the about page."""
    return render_template("about.html")
# NOTE(review): @app.route was missing in the pasted source; path inferred
# from the template name — confirm against the site's navigation links.
@app.route("/contact")
def contact():
    """Render the contact page."""
    return render_template("contact.html")
# NOTE(review): @app.route was missing in the pasted source; path inferred
# from the template name — confirm against the site's navigation links.
@app.route("/services")
def services():
    """Render the services page."""
    return render_template("services.html")
# NOTE(review): @app.route was missing in the pasted source; path inferred
# from the template name — confirm against the site's navigation links.
@app.route("/library")
def library():
    """Render the library page."""
    return render_template("library.html")
# NOTE(review): @app.route was missing in the pasted source; path inferred —
# confirm against the frontend's fetch() calls.
@app.route("/models")
def list_models():
    """Return the model names available to this API key as a JSON list."""
    try:
        models = genai.list_models()
        return jsonify([m.name for m in models])
    except Exception as e:
        # Boundary handler: surface SDK/network failures as JSON + HTTP 500
        # instead of an HTML error page.
        return jsonify({"error": str(e)}), 500
# ---------------------------------------------------------------------------
# NOTE(review): the @app.route decorator was absent in the pasted source;
# path and POST method inferred from the request.json usage — confirm against
# the frontend's fetch() calls.
@app.route("/chat", methods=["POST"])
def chat():
    """Chat endpoint: forward the user's message to Gemini and return HTML.

    Expects a JSON body {"message": "..."}; responds with
    {"response": "<html>"} where the model's Markdown reply has been
    converted to HTML for direct injection into the page.
    """
    user_message = request.json.get("message", "")
    if not user_message:
        return jsonify({"response": "Please enter a message!"})
    # Generate the chatbot reply with the module-level Gemini model.
    response = model.generate_content(user_message).text
    # Convert the Markdown reply to HTML so the client can render it as-is.
    markdown_response = markdown.markdown(response)
    return jsonify({"response": markdown_response})
# Sample Questions and Predefined Answers
# Canned replies served for exact-match questions in ask(), skipping a
# Gemini API call (and its latency/cost) for these common prompts.
# The lookup is an exact, case-sensitive string match on the full question.
sample_responses = {
    "What is AI?": "AI, or Artificial Intelligence, is the simulation of human intelligence in machines.",
    "How does machine learning work?": "Machine learning is a subset of AI that allows systems to learn from data without explicit programming.",
    "What are neural networks?": "Neural networks are a series of algorithms that recognize patterns, mimicking the human brain.",
    "Explain the Turing Test.": "The Turing Test evaluates a machine's ability to exhibit human-like intelligence in conversation.",
    "How does deep learning differ from AI?": "Deep learning is a subset of machine learning that uses neural networks with multiple layers.",
    "Hello": "Hi there! How can I help you?",
    "How are you?": "I'm just a bot from Edu MentorAI, but I'm here to help!"
}
# NOTE(review): the @app.route decorator was absent in the pasted source;
# path and POST method inferred from the request.json usage — confirm against
# the frontend's fetch() calls.
@app.route("/ask", methods=["POST"])
def ask():
    """Q&A endpoint: answer from the canned table, else ask Gemini.

    Expects a JSON body {"question": "..."}; responds with
    {"response": "..."} — plain text for canned answers, Markdown rendered
    to HTML for model-generated ones.
    """
    question = request.json.get("question", "").strip()
    if not question:
        return jsonify({"response": "Please enter a question!"})
    # Exact, case-sensitive match against the predefined answers; anything
    # else falls through to the model.
    if question in sample_responses:
        return jsonify({"response": sample_responses[question]})
    # Generate an AI response with the module-level Gemini model.
    response = model.generate_content(question).text
    # Convert the Markdown reply to HTML so the client can render it as-is.
    markdown_response = markdown.markdown(response)
    return jsonify({"response": markdown_response})
if __name__ == "__main__":
    # Port 7860 is the HTTP port Hugging Face Spaces expects the app to bind.
    # SECURITY FIX: debug=True enables the Werkzeug interactive debugger,
    # which permits arbitrary code execution on a publicly reachable server —
    # keep it disabled in deployment.
    app.run(host="0.0.0.0", port=7860, debug=False)