File size: 3,609 Bytes
d712323
5143ee0
310d364
a322f67
551ca88
bda28a6
6191794
4ddbb60
1af111a
95d0676
ae46e67
95d0676
8084dd5
1096c1b
 
 
95d0676
ae46e67
 
 
 
 
 
 
28c0272
9f58777
28c0272
 
 
03a9217
1af111a
fc4f860
91cad76
5143ee0
fc4f860
192e09d
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1b147e8
1096c1b
1b147e8
 
c3a1b48
 
 
 
 
 
 
 
bda28a6
 
 
 
 
 
c3a1b48
551ca88
 
 
 
 
 
e0fdabe
 
 
551ca88
 
c3a1b48
 
551ca88
c3a1b48
4df31af
c3a1b48
 
551ca88
 
 
2a31ecf
4df31af
551ca88
2a31ecf
4df31af
d0c0917
a6b77cd
d0c0917
310d364
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
import os
from flask import Flask, render_template, request, jsonify
from transformers import pipeline
import google.generativeai as genai
import random
import markdown  # Converts Markdown to HTML

app = Flask(__name__, static_folder='static', template_folder='templates')

# Set a writable Hugging Face cache directory in /tmp.
# NOTE(review): transformers reads HF_HOME when it is imported; since
# `from transformers import pipeline` runs above this line, the setting may
# take effect too late — consider moving this above that import. TODO confirm.
CACHE_DIR = "/tmp/huggingface_cache"
os.environ["HF_HOME"] = CACHE_DIR  # Use HF_HOME instead of TRANSFORMERS_CACHE
# (A second assignment previously overwrote HF_HOME with an unexpanded
# "~/.cache/huggingface" fallback, defeating the /tmp cache — removed.)

# Ensure the cache directory exists; exist_ok=True makes this race-safe,
# so no separate os.path.exists() check is needed.
try:
    os.makedirs(CACHE_DIR, exist_ok=True)
except PermissionError:
    print(f"❌ Permission Denied: Cannot create cache directory at {CACHE_DIR}")
    exit(1)

# Configure Google Gemini (requires: pip install google-generativeai).
# SECURITY: an API key was committed here in plain text; treat it as
# compromised and rotate it. Prefer supplying the key via the GOOGLE_API_KEY
# environment variable; the hardcoded value remains only as a
# backward-compatible fallback.
GOOGLE_API_KEY = os.environ.get(
    "GOOGLE_API_KEY",
    "AIzaSyCuAIelmhXYbJWJMy-shk0x1el7t4hp0xQ",  # fallback: original committed key — rotate!
)
genai.configure(api_key=GOOGLE_API_KEY)

# Load the Gemini model used by the /chat and /ask endpoints.
model = genai.GenerativeModel("gemini-1.5-flash")

@app.route("/")
def home():
    """Serve the landing page."""
    page = render_template("index.html")
    return page

@app.route("/ai")
def ai():
    """Serve the AI chat page."""
    page = render_template("ai.html")
    return page

@app.route("/about")
def about():
    """Serve the about page."""
    page = render_template("about.html")
    return page

@app.route("/contact")
def contact():
    """Serve the contact page."""
    page = render_template("contact.html")
    return page

@app.route("/services")
def services():
    """Serve the services page."""
    page = render_template("services.html")
    return page

@app.route("/library")
def library():
    """Serve the library page."""
    page = render_template("library.html")
    return page

#-------

#-----

@app.route("/chat", methods=["POST"])
def chat():
    """Generate a chatbot reply via Gemini for a POSTed chat message.

    Expects a JSON body ``{"message": "..."}``.
    Returns JSON ``{"response": "<html>"}`` where the model's Markdown
    output has been converted to HTML for the frontend.
    """
    # request.json raises on a missing/malformed JSON body;
    # get_json(silent=True) returns None instead, so we can answer gracefully.
    payload = request.get_json(silent=True) or {}
    user_message = payload.get("message", "")
    if not user_message:
        return jsonify({"response": "Please enter a message!"})

    # The Gemini call can fail (quota, network, safety block); surface a
    # friendly message instead of letting the request 500.
    try:
        response = model.generate_content(user_message).text
    except Exception as exc:
        return jsonify({"response": f"Sorry, the AI service is unavailable: {exc}"})

    # Convert Markdown response to HTML
    markdown_response = markdown.markdown(response)

    return jsonify({"response": markdown_response})

# Sample Questions and Predefined Answers.
# /ask consults this table before calling the Gemini model: lookups are
# exact-match and case-sensitive on the stripped question text, so only a
# verbatim question (e.g. "Hello", not "hello") hits a canned answer.
sample_responses = {
    "What is AI?": "AI, or Artificial Intelligence, is the simulation of human intelligence in machines.",
    "How does machine learning work?": "Machine learning is a subset of AI that allows systems to learn from data without explicit programming.",
    "What are neural networks?": "Neural networks are a series of algorithms that recognize patterns, mimicking the human brain.",
    "Explain the Turing Test.": "The Turing Test evaluates a machine's ability to exhibit human-like intelligence in conversation.",
    "How does deep learning differ from AI?": "Deep learning is a subset of machine learning that uses neural networks with multiple layers.",
    "Hello": "Hi there! How can I help you?",
    "How are you?": "I'm just a bot from Edu MentorAI, but I'm here to help!"
}

@app.route("/ask", methods=["POST"])
def ask():
    """Answer a POSTed question: canned answer if available, else Gemini.

    Expects a JSON body ``{"question": "..."}``.
    Returns JSON ``{"response": "<html-or-text>"}``.
    """
    # get_json(silent=True) tolerates a missing/malformed JSON body
    # (request.json would raise), letting us respond gracefully.
    payload = request.get_json(silent=True) or {}
    question = payload.get("question", "").strip()

    if not question:
        return jsonify({"response": "Please enter a question!"})
    # Exact-match (case-sensitive) lookup against the predefined answers.
    if question in sample_responses:
        return jsonify({"response": sample_responses[question]})

    # The Gemini call can fail (quota, network, safety block); surface a
    # friendly message instead of letting the request 500.
    try:
        response = model.generate_content(question).text
    except Exception as exc:
        return jsonify({"response": f"Sorry, the AI service is unavailable: {exc}"})

    # Convert Markdown response to HTML
    markdown_response = markdown.markdown(response)

    return jsonify({"response": markdown_response})
if __name__ == "__main__":
    # Listen on all interfaces on port 7860 (the Hugging Face Spaces default).
    # NOTE(review): debug=True enables the Werkzeug interactive debugger,
    # which allows arbitrary code execution — disable for any non-local
    # deployment.
    app.run(host="0.0.0.0", port=7860, debug=True)