Spaces:
Sleeping
Sleeping
# --- Load the dataset ---
import pandas as pd

# Read the sentiment corpus; coerce the text column to str so the NER
# pipeline never receives NaN/float values.
df = pd.read_csv('sentiment_data.csv')
texts = df.loc[:, 'text'].astype(str)
# --- Load the NER model ---
import spacy

# Large English pipeline; provides the `ents` attribute used below.
model = spacy.load('en_core_web_lg')
# --- Extract entities ---
# One row per input text: the raw text plus its (entity, label) pairs.
result = [
    {
        'Text': text,
        'Entity': [(ent.text, ent.label_) for ent in model(text).ents],
    }
    for text in texts
]
result_df = pd.DataFrame(result)
# --- Entity visualization ---
from spacy import displacy

# Render entity highlights for a small sample of the corpus.
for sample in texts[:5]:
    displacy.render(model(sample), style='ent')
from collections import Counter

# Flatten the per-document entity lists into one list of (text, label) pairs.
# (Fixes the misspelled local `all_entites` from the original.)
all_entities = [ent for ents in result_df['Entity'] for ent in ents]
print(all_entities)

# Tally entity labels and report the single most frequent one.
# The original left `Counter(labels).most_common(1)` as a bare expression —
# a notebook leftover whose value is silently discarded in a script — so the
# result is now printed explicitly.
labels = [label for _text, label in all_entities]
print(Counter(labels).most_common(1))
def ext_ent(sentence):
    """Return one ``'text - LABEL'`` line per named entity in *sentence*.

    Runs the module-level spaCy ``model`` over the input; each entity
    contributes a newline-terminated line, so the result is ``''`` when no
    entities are found (same contract as the original implementation).
    """
    doc = model(sentence)
    # str.join builds the string in one pass instead of quadratic '+=' in a loop.
    return ''.join(f'{ent.text} - {ent.label_}\n' for ent in doc.ents)
# --- Web demo ---
import gradio as gr

# Expose the extractor as a simple text-in / text-out Gradio app.
demo = gr.Interface(
    fn=ext_ent,
    inputs='text',
    outputs='text',
    title='Extract Entities',
)
demo.launch()