tarnava committed on
Commit
aff1aad
·
verified ·
1 Parent(s): 58ae25b

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +9 -13
app.py CHANGED
@@ -1,16 +1,15 @@
1
  import os
2
- os.environ["HF_HUB_ENABLE_HF_TRANSFER"] = "1" # faster download
3
 
4
  import torch
5
  from transformers import AutoModelForCausalLM, AutoTokenizer
6
  from peft import PeftModel
7
  import gradio as gr
8
 
9
- # --- Models Load (CPU Only) ---
10
  BASE_MODEL = "Qwen/Qwen2.5-1.5B"
11
  LORA_ADAPTER = "modular-ai/qwen"
12
 
13
- print("Loading base model on CPU... (ye 1-2 min lagega pehli baar)")
14
 
15
  base_model = AutoModelForCausalLM.from_pretrained(
16
  BASE_MODEL,
@@ -49,21 +48,18 @@ def ask_kant(message, history):
49
 
50
  # --- Gradio UI ---
51
  with gr.Blocks() as demo:
52
- gr.Markdown("# Kant AI – Qwen2.5-1.5B LoRA")
53
- gr.Markdown("**Zero GPU | Free | Live Demo** \nPoochein koi bhi sawal, *Immanuel Kant* jawab denge!")
54
 
55
- chatbot = gr.ChatInterface(
56
  fn=ask_kant,
57
  examples=[
58
  "What is freedom?",
59
  "Kya hai swatantrata?",
60
- "Explain categorical imperative"
61
  ],
62
- cache_examples=False,
63
- submit_btn="Ask Kant",
64
  )
65
-
66
- gr.Markdown("---\n*Model: Qwen2.5-1.5B + LoRA | CPU Only | ~8-12 sec per reply*")
67
 
68
- # --- Ye Line Fix Karegi Error ---
69
- demo.launch(share=False, server_name="0.0.0.0", server_port=7860)
 
1
  import os
 
2
 
3
  import torch
4
  from transformers import AutoModelForCausalLM, AutoTokenizer
5
  from peft import PeftModel
6
  import gradio as gr
7
 
8
+ # --- Load Models (CPU Only) ---
9
  BASE_MODEL = "Qwen/Qwen2.5-1.5B"
10
  LORA_ADAPTER = "modular-ai/qwen"
11
 
12
+ print("Loading base model on CPU... (pehli baar 2-3 min)")
13
 
14
  base_model = AutoModelForCausalLM.from_pretrained(
15
  BASE_MODEL,
 
48
 
49
  # --- Gradio UI ---
50
  with gr.Blocks() as demo:
51
+ gr.Markdown("# Kant AI – Live Chatbot")
52
+ gr.Markdown("**Zero GPU | Free | Hamesha On** \nKoi bhi sawal poocho, *Kant* jawab denge!")
53
 
54
+ gr.ChatInterface(
55
  fn=ask_kant,
56
  examples=[
57
  "What is freedom?",
58
  "Kya hai swatantrata?",
59
+ "Categorical imperative kya hai?"
60
  ],
61
+
 
62
  )
 
 
63
 
64
+ # --- YE LINE FIX KAREGI SAB KUCH ---
65
+ demo.launch(share=True)