eduard76 committed on
Commit
f9ed50d
·
verified ·
1 Parent(s): 239b487

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +12 -2
app.py CHANGED
@@ -1,7 +1,6 @@
1
- from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline, BitsAndBytesConfig
2
  import torch
3
  import gradio as gr
4
- from sentence_transformers import SentenceTransformer, util # for similarity gating
5
 
6
  model_id = "eduard76/Llama3-8b-good-new"
7
 
@@ -23,7 +22,18 @@ model = AutoModelForCausalLM.from_pretrained(
23
 
24
  pipe = pipeline("text-generation", model=model, tokenizer=tokenizer)
25
 
 
 
 
 
 
 
 
26
def chat(user_input):
    """Generate a model reply for *user_input* via the text-generation pipeline.

    Formats the question into a "User: ... / AI:" prompt, samples a
    completion, and returns only the newly generated answer text.
    """
    prompt = f"User: {user_input}\nAI:"
    outputs = pipe(prompt, max_new_tokens=200, do_sample=True, temperature=0.7)
    full_text = outputs[0]["generated_text"]
    # The pipeline echoes the prompt; drop it so only the answer remains.
    return full_text[len(prompt):].strip()
 
1
+ from transformers import AutoTokenizer, AutoModelForCausalLM, BitsAndBytesConfig, pipeline
2
  import torch
3
  import gradio as gr
 
4
 
5
# Hub id of the fine-tuned Llama-3-8B checkpoint this app serves.
model_id = "eduard76/Llama3-8b-good-new"
6
 
 
22
 
23
# Text-generation pipeline built from the model/tokenizer loaded above.
pipe = pipeline("text-generation", model=model, tokenizer=tokenizer)
24
 
25
# Topics covered by the fine-tuning dataset (adjust manually if needed):
covered_topics = {
    "ospf", "bgp", "vxlan", "evpn", "network design", "acl", "routing",
    "spine", "leaf", "underlay", "overlay", "mpls", "qos", "firewall",
    "vpn", "vlan", "subnet", "cidr",
}


def chat(user_input):
    """Answer a networking question, refusing topics outside the dataset.

    Parameters:
        user_input: the user's question as plain text.

    Returns:
        The model-generated answer, or a fixed refusal message (in
        Romanian, matching the app's language) when the question
        mentions none of the covered topics.
    """
    import re  # local import so this block stays self-contained

    text = user_input.lower()
    # Match topics on word boundaries: the original substring test
    # ("acl" in "obstacle", "vpn" in "ovpnx", ...) produced false
    # positives and let unrelated questions through the gate.
    if not any(
        re.search(r"\b" + re.escape(topic) + r"\b", text)
        for topic in covered_topics
    ):
        return "Îmi pare rău, nu am suficiente date despre acest subiect pentru a răspunde corect."

    prompt = f"User: {user_input}\nAI:"
    response = pipe(prompt, max_new_tokens=200, do_sample=True, temperature=0.7)[0]["generated_text"]
    # The pipeline echoes the prompt; return only the generated answer.
    return response[len(prompt):].strip()