lu.jin committed on
Commit
d5cf74c
·
1 Parent(s): 9eef176

重构代码以实现命名实体识别功能,替换问候函数,添加 Gradio 接口支持

Browse files
Files changed (1) hide show
  1. app.py +36 -7
app.py CHANGED
@@ -1,14 +1,43 @@
1
  import gradio as gr
2
  import spaces
3
  import torch
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
4
 
5
- zero = torch.Tensor([0]).cuda()
6
- print(zero.device) # <-- 'cpu' 🤔
7
 
8
  @spaces.GPU
9
- def greet(n):
10
- print(zero.device) # <-- 'cuda:0' 🤗
11
- return f"Hello {zero + n} Tensor"
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
12
 
13
- demo = gr.Interface(fn=greet, inputs=gr.Number(), outputs=gr.Text())
14
- demo.launch()
 
import gradio as gr
import spaces
import torch
from transformers import AutoModelForTokenClassification, AutoTokenizer, pipeline

# Hugging Face Hub repository id of the fine-tuned NER model.
# NOTE(review): assumes the repo is publicly readable or auth is configured — confirm.
MODEL_ID = "lujin/search-ner-lora-model"
# transformers pipeline device convention: 0 = first CUDA device, -1 = CPU.
DEVICE = 0 if torch.cuda.is_available() else -1

# Load model and tokenizer once at import time so the interface stays responsive
# (every request reuses the already-loaded weights instead of reloading them).
model = AutoModelForTokenClassification.from_pretrained(MODEL_ID)
tokenizer = AutoTokenizer.from_pretrained(MODEL_ID)

# Token-classification (NER) pipeline. aggregation_strategy="simple" groups
# sub-word token predictions into whole-entity spans, each with one label
# and an aggregated score.
ner_pipe = pipeline(
    "token-classification",
    model=model,
    tokenizer=tokenizer,
    aggregation_strategy="simple",
    device=DEVICE,
)
20
 
 
 
21
 
22
@spaces.GPU
def run_ner(text: str):
    """Run the NER pipeline on *text* and return aggregated entity spans.

    A ``None`` or whitespace-only input short-circuits to an empty list so
    the model is never invoked on blank text.
    """
    cleaned = (text or "").strip()
    if cleaned:
        return ner_pipe(cleaned)
    return []
29
+
30
+
31
# Sample inputs shown below the interface; each inner list is one example row.
_EXAMPLES = [
    ["OpenAI总部位于旧金山。"],
    ["小明毕业于清华大学,现在在阿里巴巴工作。"],
]

# Wire the NER function into a simple text-in / JSON-out Gradio UI.
demo = gr.Interface(
    fn=run_ner,
    inputs=gr.Textbox(lines=4, label="Input Text"),
    outputs=gr.JSON(label="Entities"),
    title="Search NER",
    description="Named-entity recognition using lujin/search-ner-lora-model.",
    examples=_EXAMPLES,
)

demo.launch()