Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
|
@@ -24,7 +24,7 @@ pipe = pipeline("text-generation", model=model, tokenizer=tokenizer)
|
|
| 24 |
|
| 25 |
# 🔐 Lista de topicuri din dataset (poți ajusta manual dacă vrei):
|
| 26 |
covered_topics = {
|
| 27 |
-
"ospf", "bgp", "vxlan", "evpn", "network design", "acl", "routing",
|
| 28 |
"spine", "leaf", "underlay", "overlay", "mpls", "qos", "firewall",
|
| 29 |
"vpn", "vlan", "subnet", "cidr"
|
| 30 |
}
|
|
@@ -32,13 +32,13 @@ covered_topics = {
|
|
| 32 |
def chat(user_input):
|
| 33 |
# Verificăm dacă întrebarea conține topicuri cunoscute
|
| 34 |
if not any(topic in user_input.lower() for topic in covered_topics):
|
| 35 |
-
return "
|
| 36 |
|
| 37 |
prompt = f"User: {user_input}\nAI:"
|
| 38 |
-
response = pipe(prompt, max_new_tokens=
|
| 39 |
return response[len(prompt):].strip()
|
| 40 |
|
| 41 |
-
iface = gr.Interface(fn=chat, inputs="text", outputs="text", title="
|
| 42 |
|
| 43 |
if __name__ == "__main__":
|
| 44 |
iface.launch()
|
|
|
|
| 24 |
|
| 25 |
# Topics present in the fine-tuning dataset; questions that mention none of
# these are refused.  Adjust this set manually as the dataset grows.
covered_topics = {
    "acl", "bgp", "cidr", "eigrp", "evpn", "firewall", "leaf", "mpls",
    "network design", "ospf", "overlay", "qos", "routing", "spine",
    "subnet", "underlay", "vlan", "vpn", "vxlan",
}
|
|
|
|
| 32 |
def chat(user_input):
    """Answer a networking question, refusing topics outside the fine-tuning set.

    Args:
        user_input: The user's free-form question.

    Returns:
        The model's generated completion (prompt stripped off), or a fixed
        refusal message when the question mentions no covered topic.
    """
    import re

    text = user_input.lower()
    # Whole-word match instead of plain substring containment, so e.g.
    # "oracle" does not count as mentioning "acl", nor "ovpn" as "vpn".
    if not any(re.search(r"\b" + re.escape(topic) + r"\b", text)
               for topic in covered_topics):
        return "I am sorry, I was not fine tuned with this data."

    prompt = f"User: {user_input}\nAI:"
    # The text-generation pipeline returns the prompt plus the completion
    # by default, so slice the prompt back off before returning.
    response = pipe(prompt, max_new_tokens=128, do_sample=True, temperature=0.1)[0]["generated_text"]
    return response[len(prompt):].strip()
|
| 40 |
|
| 41 |
+
# Minimal Gradio UI: a single text box in, a single text box out,
# wired to the chat() handler above.
iface = gr.Interface(
    fn=chat,
    inputs="text",
    outputs="text",
    title="Eduard's 1st virtual Architect",
)

if __name__ == "__main__":
    iface.launch()
|