AumCoreAI commited on
Commit
df954f5
·
1 Parent(s): e0b5fb0

FINAL DEPLOY: Vision AI logic integrated with AICore

Browse files
Files changed (2):
  1. app.py +22 -8
  2. main.py +32 -9
app.py CHANGED
@@ -1,24 +1,38 @@
1
-
2
  import gradio as gr
3
  import os
4
  from main import AICore
5
 
6
- api_key = os.environ.get("GROQ_API_KEY")
 
7
  ai = AICore(api_key=api_key)
8
 
9
  def chat_func(text, img, history):
10
- if not text and not img: return "", history
 
 
 
11
  response = ai.get_response(text or "Describe this image", img)
 
 
12
  history.append((text or "Image Sent", response))
13
  return "", history
14
 
15
  with gr.Blocks(theme=gr.themes.Soft()) as demo:
16
- gr.Markdown("# 🤖 AumCore-M7B Vision AI (Direct Update)")
17
- chat = gr.Chatbot(height=500)
 
18
  with gr.Row():
19
- txt = gr.Textbox(show_label=False, placeholder="Sawal puchiye...", scale=7)
20
- img = gr.Image(type="filepath", label="Upload Photo", scale=3)
21
- txt.submit(chat_func, [txt, img, chat], [txt, chat])
 
 
 
 
 
 
 
22
 
23
  if __name__ == "__main__":
 
24
  demo.launch(server_name="0.0.0.0", server_port=7860)
 
 
1
  import gradio as gr
2
  import os
3
  from main import AICore
4
 
# API key: read from the environment (set it as an HF Space secret).
# The previous placeholder default was sent to Groq as if it were a real
# key and produced a confusing auth error at request time; warn loudly at
# startup instead, while still letting the UI come up.
api_key = os.environ.get("GROQ_API_KEY", "")
if not api_key:
    print("WARNING: GROQ_API_KEY is not set - API calls will fail. "
          "Add it as a Space secret or environment variable.")

ai = AICore(api_key=api_key)
8
 
9
def chat_func(text, img, history):
    """Handle one chat turn: query the AI and append the exchange to history.

    Returns ("", history) so the textbox is cleared after every submit.
    """
    # Skip the turn entirely when the user submitted neither text nor image.
    if text or img:
        prompt = text if text else "Describe this image"
        label = text if text else "Image Sent"
        # Forward the prompt (and the optional image path) to the model.
        answer = ai.get_response(prompt, img)
        history.append((label, answer))
    return "", history
19
 
20
# Gradio UI: chat window on top, text box (left) and image upload (right) below.
with gr.Blocks(theme=gr.themes.Soft()) as demo:
    gr.Markdown("## 🤖 AumCore-M7B Real AI Vision")

    chat_window = gr.Chatbot(height=500)

    with gr.Row():
        with gr.Column(scale=8):
            question_box = gr.Textbox(
                show_label=False,
                placeholder="AumCore se kuch bhi puchiye...",
                container=False,
            )
        with gr.Column(scale=2):
            photo_input = gr.Image(type="filepath", label="Vision")

    # Pressing Enter submits the turn and clears the textbox.
    question_box.submit(
        chat_func,
        [question_box, photo_input, chat_window],
        [question_box, chat_window],
    )
35
 
36
if __name__ == "__main__":
    # HF Spaces serve on port 7860; bind to all interfaces so the
    # container's mapped port is reachable from outside.
    demo.launch(server_name="0.0.0.0", server_port=7860)
main.py CHANGED
@@ -1,19 +1,42 @@
1
-
2
  import os
 
3
  from groq import Groq
4
 
5
  class AICore:
6
- def __init__(self, api_key=None):
7
- self.client = Groq(api_key=api_key or os.environ.get("GROQ_API_KEY"))
 
 
 
 
 
 
 
 
 
 
8
 
9
- def get_response(self, prompt, image_path=None):
 
 
 
 
 
10
  try:
11
- # Simple Text + Image logic for Groq
12
- messages = [{"role": "user", "content": prompt}]
 
 
13
  completion = self.client.chat.completions.create(
14
- model="llama-3.2-11b-vision-preview",
15
- messages=messages
 
16
  )
17
- return completion.choices[0].message.content
 
 
18
  except Exception as e:
19
  return f"Error: {str(e)}"
 
 
 
 
 
import base64
import json
import mimetypes
import os

from groq import Groq
4
 
5
class AICore:
    """Groq-backed chat core with a small persistent conversation memory.

    Supports plain-text prompts and, when an image path is supplied,
    vision prompts sent in Groq's multimodal message format.
    """

    # Keep only the most recent exchanges, both in RAM and on disk.
    MEMORY_LIMIT = 10

    def __init__(self, api_key):
        self.client = Groq(api_key=api_key)
        # Vision-capable model id used for every completion request.
        self.model = "qwen-2-vl-7b-instruct"
        self.memory_file = "memory.json"
        self._load_memory()

    def _load_memory(self):
        """Load prior exchanges from disk; start fresh if missing or corrupt."""
        self.memory = []
        if os.path.exists(self.memory_file):
            try:
                with open(self.memory_file, 'r') as f:
                    self.memory = json.load(f)
            except (OSError, json.JSONDecodeError):
                # A corrupt or unreadable memory file must not crash startup.
                self.memory = []

    def save_to_memory(self, user_input, ai_response):
        """Record one exchange, keeping only the last MEMORY_LIMIT entries."""
        self.memory.append({"user": user_input, "bot": ai_response})
        # Truncate the in-memory list as well: previously only the file was
        # capped at 10 while self.memory grew without bound over a session.
        self.memory = self.memory[-self.MEMORY_LIMIT:]
        with open(self.memory_file, 'w') as f:
            json.dump(self.memory, f)

    @staticmethod
    def _image_to_data_url(image_path):
        """Encode a local image file as a base64 data URL for the API."""
        mime = mimetypes.guess_type(image_path)[0] or "image/jpeg"
        with open(image_path, 'rb') as f:
            encoded = base64.b64encode(f.read()).decode("ascii")
        return f"data:{mime};base64,{encoded}"

    def get_response(self, text, image_path=None):
        """Return the model's reply to *text*, optionally about an image.

        Fix: the previous version accepted ``image_path`` but silently
        ignored it, so uploaded images never reached the model. Images are
        now sent using the multimodal ``image_url`` content format.
        Any failure is returned as an "Error: ..." string so the UI
        never crashes.
        """
        try:
            if image_path:
                content = [
                    {"type": "text", "text": text},
                    {"type": "image_url",
                     "image_url": {"url": self._image_to_data_url(image_path)}},
                ]
            else:
                content = text
            messages = [{"role": "user", "content": content}]
            completion = self.client.chat.completions.create(
                model=self.model,
                messages=messages,
                temperature=0.7
            )
            response = completion.choices[0].message.content
            self.save_to_memory(text, response)
            return response
        except Exception as e:
            return f"Error: {str(e)}"
40
+
41
if __name__ == "__main__":
    # Smoke-test entry point; in normal use this module is imported by app.py.
    print("AICore is ready!")