# HuggingFace Spaces demo app (Space status banner: "Sleeping")
import gradio as gr
from transformers import AutoTokenizer, AutoModelForSequenceClassification
import torch

# Load the pre-trained model and tokenizer once at module import so every
# request reuses the same instances.
# NOTE(review): distilbert-base-uncased ships without a sequence-classification
# head, so the 5-label head added here is randomly initialized — predictions
# are meaningless until the model is fine-tuned on labeled article data.
model_name = "distilbert-base-uncased"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForSequenceClassification.from_pretrained(model_name, num_labels=5)  # Adjust num_labels
| # Define the function to get article suggestions | |
def suggest_articles(case_details):
    """Suggest a knowledge-article ID for the given case details.

    Parameters
    ----------
    case_details : str
        Free-text description of the support case.

    Returns
    -------
    str
        Human-readable message containing the predicted article ID
        (an integer in 0..4, one per classifier label).
    """
    # Guard clause: avoid running the model on empty/whitespace input.
    if not case_details or not case_details.strip():
        return "Suggested Article ID: 0"
    # truncation=True keeps the input within the model's 512-token limit;
    # without it, long case descriptions raise a runtime error.
    inputs = tokenizer(case_details, return_tensors="pt", truncation=True)
    # Inference only — no gradients needed.
    with torch.no_grad():
        outputs = model(**inputs)
    # argmax over the label dimension gives the most likely article class.
    prediction = outputs.logits.argmax(dim=1).item()
    return f"Suggested Article ID: {prediction}"
# Build the Gradio interface: a single text box in, a text message out.
interface = gr.Interface(
    fn=suggest_articles,
    inputs="text",
    outputs="text",
    title="Knowledge Article Suggestion",
    description="Enter case details to get relevant article suggestions.",
)

# Launch only when executed as a script, so the module stays importable
# (e.g. for tests or embedding in a larger app) without starting a server.
if __name__ == "__main__":
    interface.launch()