Update app.py
app.py
CHANGED
@@ -1,19 +1,19 @@
 import gradio as gr
 from huggingface_hub import InferenceClient
-
-"""
-For more information on `huggingface_hub` Inference API support, please check the docs: https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference
-"""
-client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
-
 from model import load_model, load_tokenizer
 from utils import clean_output, get_shap_values
 import torch
 import shap
 import matplotlib.pyplot as plt
 
-
+"""
+For more information on `huggingface_hub` Inference API support, please check the docs: https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference
+"""
+client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
+
+# Load model and tokenizer once
 model = load_model()
+tokenizer = load_tokenizer()
 
 def gradio_generate(context, num_questions, max_length):
     input_prompt = f"generate question: {context.strip()}"
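Note: `load_model` and `load_tokenizer` are imported from the repo's local `model` module, which this commit does not touch. A minimal, hypothetical sketch of what that module might contain, assuming a seq2seq (T5-style) question-generation checkpoint (the `generate question:` prefix in `gradio_generate` suggests one; the checkpoint name below is a placeholder, not the repo's actual model):

# model.py (hypothetical sketch, not part of this commit)
from transformers import AutoModelForSeq2SeqLM, AutoTokenizer

CHECKPOINT = "t5-small"  # placeholder; the real checkpoint is defined in the repo's model.py

def load_model():
    # Load the seq2seq model once at startup so every Gradio call reuses the same weights
    return AutoModelForSeq2SeqLM.from_pretrained(CHECKPOINT)

def load_tokenizer():
    # Load the tokenizer matching the same checkpoint
    return AutoTokenizer.from_pretrained(CHECKPOINT)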