|
|
import transformers |
|
|
import torch |
|
|
import gradio as gr |
|
|
from datasets import load_dataset |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Hugging Face model identifier. NOTE(review): this is a gated repo — an
# approved access token must be configured or loading will fail.
model_id = "meta-llama/Meta-Llama-3.1-8B-Instruct"


# Text-generation pipeline for the chat model.
# bfloat16 halves memory versus float32; device_map="auto" lets accelerate
# place the weights across whatever GPUs/CPU are available.
pipeline = transformers.pipeline(
    "text-generation",
    model=model_id,
    model_kwargs={"torch_dtype": torch.bfloat16},
    device_map="auto",
)


# Reference dataset of Cisco CLI commands; search_dataset() scans its
# 'train' split for entries with "command"/"description"/"examples" fields.
dataset = load_dataset("quantumminds/cisco_cli_commands")
|
|
|
|
|
|
|
|
def search_dataset(user_input, data=None):
    """Look up a Cisco CLI command mentioned in *user_input*.

    Scans the 'train' split for the first entry whose "command" string occurs
    (case-insensitively) in the user's text.

    Args:
        user_input: Free-form user question, e.g. "how do I use show ip route".
        data: Optional dataset-like mapping with a 'train' iterable of dicts
            ("command", "description", optional "examples"). Defaults to the
            module-level ``dataset`` loaded at import time.

    Returns:
        A Markdown-formatted answer string, or None when no command matches.
    """
    if data is None:
        data = dataset
    # Lowercase once instead of on every iteration.
    query = user_input.lower()
    for entry in data['train']:
        if entry["command"].lower() in query:
            # Original code indexed entry['examples'][0] whenever the key
            # existed, which raised IndexError on an empty list and KeyError
            # when the first example lacked 'example_command'. Guard both.
            examples = entry.get("examples") or []
            if examples:
                example = examples[0].get("example_command", "No example available")
            else:
                example = "No example available"
            return (
                f"**Command:** {entry['command']}\n\n"
                f"**Description:** {entry['description']}\n\n"
                f"**Example:** {example}"
            )
    return None
|
|
|
|
|
|
|
|
def generate_response(user_input):
    """Answer a Cisco configuration question.

    First tries an exact lookup in the command dataset; when nothing matches,
    falls back to the LLM pipeline.

    Args:
        user_input: The user's question as typed into the Gradio textbox.

    Returns:
        A plain string suitable for Gradio's "text" output component.
    """
    # Prefer the curated dataset answer over a model generation.
    dataset_response = search_dataset(user_input)
    if dataset_response:
        return dataset_response

    messages = [
        {"role": "system", "content": "You are a pirate chatbot who specializes in Cisco switch and router configurations"},
        {"role": "user", "content": user_input},
    ]

    outputs = pipeline(messages, max_new_tokens=256)

    generated = outputs[0]["generated_text"]
    # BUG FIX: with chat-format input, transformers returns the *entire*
    # conversation as a list of {"role", "content"} dicts in
    # "generated_text". The original code returned that list directly, so
    # Gradio displayed its Python repr. Extract the assistant's final reply;
    # keep the plain-string path for prompt-string inputs/older versions.
    if isinstance(generated, list):
        return generated[-1]["content"]
    return generated
|
|
|
|
|
|
|
|
# Simple single-turn UI: one textbox in, one text answer out.
# fn=generate_response is called once per submission.
iface = gr.Interface(
    fn=generate_response,
    inputs=gr.Textbox(lines=2, placeholder="Enter your Cisco switch/router question here..."),
    outputs="text",
    title="Cisco Configuration Assistant",
    description="Ask the chatbot questions about Cisco switch/router configurations",
)


# Starts the local Gradio server; blocks until interrupted.
iface.launch()