# NIUS-Nithin_AI / app.py
# Uploaded by NithinAI12 via huggingface_hub (commit 9ee9fc6, verified)
# ==============================================================================
# ▄ NIUS: THE ENDGAME SYSTEM (FINAL PERFECTED) ▄
# ==============================================================================
# ✨ BRAIN: Qwen 2.5 (Context-Forced + Refusal Eraser)
# ✨ SEARCH: Universal Link (DuckDuckGo + Wikipedia) -> 100% Uptime
# ✨ MOTION: Lightning Engine (AnimateDiff -> ZeroScope -> GIF Fallback)
# ✨ VISION: Flux (Pollinations)
# ✨ CODE: Dedicated Side Output Area
import os
import random
import subprocess
import sys
import urllib.parse

import gradio as gr
from gradio_client import Client
from huggingface_hub import hf_hub_download
from llama_cpp import Llama
print(">>> ✨ LAUNCHING NIUS CORE...")

# --- LOAD INTELLIGENCE ---
# Primary brain: Qwen 2.5 Coder (quantized GGUF). If the download fails
# (gated repo, rename, network error) fall back to Mistral 7B Instruct.
repo = "Qwen/Qwen2.5-Coder-7B-Instruct-GGUF"
filename = "qwen2.5-coder-7b-instruct-q4_k_m.gguf"
try:
    model_path = hf_hub_download(repo_id=repo, filename=filename)
except Exception as e:
    # Narrowed from a bare `except:` so Ctrl-C / SystemExit still propagate,
    # and the reason for the fallback is visible in the logs.
    print(f">>> Primary model unavailable ({e}); falling back to Mistral 7B.")
    model_path = hf_hub_download(
        repo_id="TheBloke/Mistral-7B-Instruct-v0.1-GGUF",
        filename="mistral-7b-instruct-v0.1.Q4_K_M.gguf",
    )

llm = Llama(
    model_path=model_path,
    n_gpu_layers=-1,  # offload every layer to GPU when one is available
    n_ctx=8192,       # context window (tokens)
    verbose=False,
)
# --- UNIVERSAL WEB SEARCH ---
def search_real_web(query):
    """Fetch live web context for *query*.

    Tries DuckDuckGo first; when that yields (almost) nothing, falls back to
    a short Wikipedia summary. Always returns a string ("" when every backend
    fails) so callers can concatenate it into the LLM prompt unconditionally.
    """
    print(f">>> ✨ SEARCHING LIVE WEB FOR: {query}")
    context_data = ""
    # Strip common "command" words so only the topic itself is searched.
    clean_query = query.replace("search", "").replace("google", "").replace("find", "").strip()
    try:
        from duckduckgo_search import DDGS
        results = DDGS().text(clean_query, max_results=5)
        if results:
            context_data += "--- [LIVE INTERNET DATA START] ---\n"
            for r in results:
                context_data += f"• INFO: {r.get('body', r.get('title', ''))}\n"
                context_data += f"• SOURCE: {r.get('href', 'Web')}\n\n"
            context_data += "--- [LIVE INTERNET DATA END] ---\n"
    except Exception as e:
        print(f"DDG Search Error: {e}")
    # Wikipedia backup when DDG produced fewer than ~50 chars of context.
    if len(context_data) < 50:
        try:
            import wikipedia
            wiki_data = wikipedia.summary(clean_query, sentences=3)
            context_data += f"--- [FACTUAL BACKUP] ---\n{wiki_data}\n"
        except Exception as e:
            # Was a silent bare `except:`; keep the best-effort behaviour
            # but log the failure instead of hiding it.
            print(f"Wikipedia Backup Error: {e}")
    return context_data
# ==============================================================================
# ▄ NIUS MOTION ENGINE (WITH NAI WATERMARK)
# ==============================================================================
def add_watermark(input_path, text="NAI"):
    """Burn a permanent text watermark into a video/GIF with FFmpeg.

    The mark is drawn bottom-right in white with a black shadow. Returns the
    path of the watermarked copy, or *input_path* unchanged when FFmpeg is
    missing or fails, so callers always receive a usable path.
    """
    try:
        # Only replace the extension — str.replace(".", ...) would also
        # mangle dots inside directory names or versioned filenames.
        root, ext = os.path.splitext(input_path)
        output_path = f"{root}_watermarked{ext}"
        drawtext = (
            f"drawtext=text='{text}':x=W-tw-10:y=H-th-10:"
            "fontsize=40:fontcolor=white:shadowcolor=black:shadowx=2:shadowy=2"
        )
        # Argument list + shell=False sidesteps the shell-quoting bugs of the
        # old f-string command and any injection via odd file names.
        result = subprocess.run(
            ["ffmpeg", "-y", "-i", input_path, "-vf", drawtext,
             "-codec:a", "copy", output_path, "-hide_banner", "-loglevel", "error"],
            check=False,
        )
        if result.returncode == 0 and os.path.exists(output_path):
            return output_path
        # FFmpeg failed: do NOT hand back a path to a file that was never
        # written (the old code did exactly that).
        return input_path
    except Exception as e:
        print(f"Watermark Error: {e}")
        return input_path  # Return original if watermarking fails
def nius_video(prompt):
    """Generate a short video (or animated-GIF fallback) for *prompt*.

    Returns ``(path_or_url, media_type)`` where media_type is "video" for a
    local MP4 path, "image" for a remote GIF URL, or ``(None, None)`` when
    every backend failed.
    """
    print(f">>> ✨ NIUS MOTION: Initializing Video Protocol for '{prompt}'...")
    raw_video_path = None

    # --- ATTEMPT 1: ANIMATEDIFF LIGHTNING (fastest video) ---
    try:
        print(">> Connecting to AnimateDiff-Lightning (High Speed)...")
        client = Client("ByteDance/AnimateDiff-Lightning")
        result = client.predict(prompt, "1-step", api_name="/generate")
        # The gradio client may return either a plain path string or a dict
        # holding one. The old code subscripted first (`result['video']`),
        # which raised TypeError on a string result and wrongly fell through
        # to the backup server even after a successful generation.
        if isinstance(result, str):
            raw_video_path = result
        elif isinstance(result, dict):
            raw_video_path = result.get("video")
    except Exception as e:
        print(f"Server 1 Busy ({e}). Switching to Backup...")

    # --- ATTEMPT 2: ZEROSCOPE (reliable backup) ---
    if not raw_video_path:
        try:
            print(">> Connecting to ZeroScope Server (Stable)...")
            client = Client("cerspense/zeroscope_v2_576w")
            raw_video_path = client.predict(prompt, api_name="/infer")
        except Exception as e:
            # `e` was previously captured but never shown.
            print(f"Server 2 Busy ({e}). Engaging Fallback Protocol...")

    # --- ATTEMPT 3: POLLINATIONS MOTION (fail-safe GIF URL) ---
    if not raw_video_path:
        print(">> All Video Clusters Occupied. Generating Motion Preview...")
        seed = random.randint(0, 99999)
        safe_prompt = prompt.replace(" ", "%20")
        gif_url = f"https://image.pollinations.ai/prompt/{safe_prompt}?nologo=true&seed={seed}&width=720&height=720&model=flux&gif=true"
        # Remote URL: cannot watermark without downloading, so return as image.
        return gif_url, "image"

    # --- FINAL STEP: APPLY "NAI" WATERMARK to the local file ---
    if raw_video_path and os.path.exists(raw_video_path):
        print(f">> Applying 'NAI' Watermark to {raw_video_path}...")
        final_path = add_watermark(raw_video_path, text="NAI")
        return final_path, "video"
    return None, None
# --- VISION ENGINE ---
def nius_image(prompt):
    """Return a Pollinations (Flux) image-generation URL for *prompt* (1280x720)."""
    seed = random.randint(0, 99999)  # fresh seed -> a different image per call
    # quote() percent-encodes every URL-unsafe character; the old
    # replace(" ", "%20") broke on '&', '#', '?', unicode, etc.
    safe_prompt = urllib.parse.quote(prompt)
    return f"https://image.pollinations.ai/prompt/{safe_prompt}?nologo=true&seed={seed}&width=1280&height=720&model=flux"
# --- CORE LOGIC ---
def nius_core(message, history):
    """Route a user message to the right engine and update the chat state.

    Returns a 5-tuple matching the Gradio outputs:
    ``(textbox_reset, history, image_url, video_path, extracted_code)``.
    """
    msg_lower = message.lower()

    # A. VIDEO MODE
    if any(x in msg_lower for x in ["video", "movie", "animate"]):
        prompt = message
        # Strip every trigger word (the old code forgot "animate").
        for word in ("video", "movie", "animate"):
            prompt = prompt.replace(word, "")
        prompt = prompt.strip()
        history.append((message, "**✨ NIUS Motion:** Processing... (Check 'Output Deck' on the right)"))
        media_path, media_type = nius_video(prompt)
        if media_type == "video":
            return "", history, None, media_path, None
        else:
            # GIF fallback is a remote URL -> render inline as markdown.
            history.append((None, f"![Motion GIF]({media_path})"))
            return "", history, None, None, None

    # B. IMAGE MODE
    elif any(x in msg_lower for x in ["draw", "image", "photo", "paint"]):
        prompt = message
        # Strip every trigger word (the old code forgot "photo"/"paint").
        for word in ("draw", "image", "photo", "paint"):
            prompt = prompt.replace(word, "")
        prompt = prompt.strip()
        img_url = nius_image(prompt)
        history.append((message, f"**✨ NIUS Vision:**\n![Image]({img_url})"))
        return "", history, img_url, None, None

    # C. CHAT / CODE / SEARCH
    else:
        is_code_request = any(x in msg_lower for x in ["code", "script", "python", "html", "css", "function"])
        real_web_data = ""
        if not is_code_request:
            real_web_data = search_real_web(message)
        identity_instruction = """
[SYSTEM OVERRIDE]
1. IDENTITY: You are NIUS (Nithin Intelligence Unified System).
2. CREATOR: Created by **Nithin**.
3. RULE: If [LIVE INTERNET DATA] is present, use it. DO NOT say "I don't have access".
4. CODE: If asked for code, write clean, complete code inside markdown blocks.
"""
        final_user_content = f"{identity_instruction}\n\n{real_web_data}\n\nUSER QUERY: {message}"

        # Build a ChatML transcript (Qwen prompt format) from history + turn.
        full_prompt = ""
        for user_text, bot_text in history:
            full_prompt += f"<|im_start|>user\n{user_text}<|im_end|>\n<|im_start|>assistant\n{bot_text}<|im_end|>\n"
        full_prompt += f"<|im_start|>user\n{final_user_content}<|im_end|>\n<|im_start|>assistant\n"

        try:
            output = llm(full_prompt, max_tokens=2048, stop=["<|im_end|>"], temperature=0.7)
            response = output['choices'][0]['text']

            # REFUSAL ERASER: replace canned "no internet access" answers.
            bad_phrases = ["I don't have access to real-time", "I apologize", "cutoff", "cannot access"]
            for phrase in bad_phrases:
                if phrase in response:
                    if real_web_data:
                        response = f"**[NIUS LIVE UPDATE]**: Based on real-time data:\n\n{real_web_data}\n\n(Confirmed via Live Search)."
                    else:
                        response = "I tried to search, but the network timed out. I am NIUS, created by Nithin."
                    break

            # CODE EXTRACTION: pull the first fenced block into the side panel.
            # (Replaces the bare `except: pass` and the fragile hard-coded
            # slice lengths for each language tag.)
            extracted_code = None
            if "```" in response:
                parts = response.split("```")
                if len(parts) >= 2:
                    extracted_code = parts[1]
                    lang_tag, _, rest = extracted_code.lstrip().partition("\n")
                    if lang_tag.strip().lower() in {"python", "html", "css", "javascript", "js"}:
                        extracted_code = rest

            response = response.replace("OpenAI", "Nithin").replace("Alibaba", "Nithin")
            history.append((message, response))
            return "", history, None, None, extracted_code
        except Exception as e:
            history.append((message, f"Error: {e}"))
            return "", history, None, None, None
# --- UI ---
# Terminal-green "hacker" theme applied to the whole app.
css = """
body { background: linear-gradient(190deg, #000000, #111111); color: #00ff41; font-family: 'Consolas'; }
.gradio-container { border: 1px solid #00ff41; box-shadow: 0 0 15px #00ff41; }
#chatbot { height: 600px; background: #050505; }
button.primary { background: #00ff41; color: black; font-weight: bold; }
textarea { background: #111; color: white; border: 1px solid #333; }
"""

with gr.Blocks(css=css, title="NIUS Ultimate") as app:
    gr.Markdown("# ✨ NIUS: THE ENDGAME ✨")
    gr.Markdown("### Search • Vision • Motion • Code | Creator: Nithin")
    with gr.Row():
        # Left column: chat transcript + input controls.
        with gr.Column(scale=3):
            chatbot = gr.Chatbot(elem_id="chatbot", bubble_full_width=False)
            msg = gr.Textbox(placeholder="Ask: 'News on Tesla', 'Code a snake game', 'Video of a car'...")
            with gr.Row():
                btn_go = gr.Button("✨ EXECUTE", variant="primary")
                btn_rst = gr.Button("✨ RESET")
        # Right column: "Output Deck" that nius_core fills with media/code.
        with gr.Column(scale=1):
            gr.Markdown("### ↓ OUTPUT DECK")
            out_img = gr.Image(label="Image Output")
            out_vid = gr.Video(label="Video Output")
            out_code = gr.Code(label="Generated Code", language="python", interactive=False)

    # Enter key and EXECUTE both run the core router; RESET clears the chat.
    msg.submit(nius_core, [msg, chatbot], [msg, chatbot, out_img, out_vid, out_code])
    btn_go.click(nius_core, [msg, chatbot], [msg, chatbot, out_img, out_vid, out_code])
    btn_rst.click(lambda: None, None, chatbot, queue=False)

print(">>> ✨ NIUS ONLINE. CLICK THE LINK BELOW.")
# share=True exposes a public Gradio link; debug=True blocks and streams logs.
app.queue().launch(share=True, debug=True)