Ravi21 committed on
Commit
eafd233
·
1 Parent(s): 06ea84b

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +1 -47
app.py CHANGED
@@ -32,53 +32,7 @@ def predict(data):
32
# Create the Gradio interface.
# NOTE(review): the legacy `gr.inputs` namespace has no JSON component, so
# `gr.inputs.JSON()` raises AttributeError at import time.  The modern
# top-level components `gr.JSON` and `gr.Label` are used instead.
# NOTE(review): `predict` appears to expect already-tokenized, batched data
# (`input_ids` / `attention_mask`), while this interface feeds it the raw
# prompt/options dict — confirm a preprocessing step is wired in between.
iface = gr.Interface(
    fn=predict,
    inputs=gr.JSON(),
    outputs=gr.Label(num_top_classes=3),
    live=True,
    examples=[
        {"prompt": "This is the prompt", "A": "Option A text", "B": "Option B text", "C": "Option C text", "D": "Option D text", "E": "Option E text"}
    ],
    title="LLM Science Exam Demo",
    description="Enter the prompt and options (A to E) below and get predictions.",
)

# Run the interface
iface.launch()
47
- import pandas as pd
48
- import numpy as np
49
- import gradio as gr
50
- import torch
51
- from transformers import AutoModelForMultipleChoice, AutoTokenizer
52
- from huggingface_hub import hf_hub_url, Repository
53
-
54
# Load the fine-tuned multiple-choice checkpoint and its tokenizer.
# NOTE(review): "my_model" is a relative path — confirm the checkpoint
# directory ships alongside app.py.
model_path = "my_model"
tokenizer = AutoTokenizer.from_pretrained(model_path)
model = AutoModelForMultipleChoice.from_pretrained(model_path)
58
-
59
# Define the preprocessing function
def preprocess(sample):
    """Tokenize one question against each of its five answer options.

    Pairs ``sample["prompt"]`` with every option A–E, runs the module-level
    ``tokenizer``, and stores the resulting ``input_ids`` and
    ``attention_mask`` tensors back onto *sample* before returning it.
    """
    prompts = [sample["prompt"] for _ in "ABCDE"]
    options = [sample[letter] for letter in "ABCDE"]
    encoded = tokenizer(
        prompts,
        options,
        truncation=True,
        padding=True,
        return_tensors="pt",
    )
    # Mutates the input dict in place and also returns it.
    for key in ("input_ids", "attention_mask"):
        sample[key] = encoded[key]
    return sample
67
-
68
# Define the prediction function
def predict(data):
    """Score the answer options and return the top-3 letters per question.

    Runs the module-level ``model`` on the batched ``input_ids`` /
    ``attention_mask`` in *data* and, for each row, returns a 3-character
    string of option letters (from "ABCDE") ranked by descending logit.
    """
    input_ids = torch.stack(data["input_ids"])
    attention_mask = torch.stack(data["attention_mask"])
    # Inference only — no gradient tracking needed.
    with torch.no_grad():
        logits = model(input_ids, attention_mask=attention_mask).logits
    # Negating the logits makes argsort rank options best-first.
    order = (-logits).argsort(dim=1)
    letter_grid = np.array(list("ABCDE"))[order.tolist()]
    return ["".join(row) for row in letter_grid[:, :3]]
77
-
78
- # Create the Gradio interface
79
- iface = gr.Interface(
80
- fn=predict,
81
- inputs=gr.inputs.JSON(),
82
  outputs=gr.outputs.Label(num_top_classes=3),
83
  live=True,
84
  examples=[
 
32
  # Create the Gradio interface
33
  iface = gr.Interface(
34
  fn=predict,
35
+ inputs=gr.inputs.Input(type="json"),
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
36
  outputs=gr.outputs.Label(num_top_classes=3),
37
  live=True,
38
  examples=[