# NER + Sentiment demo (Hugging Face Space)
import spacy
import pandas as pd
from transformers import pipeline
import gradio as gr
import subprocess
import sys
# Try to load the German spaCy pipeline; download it first if missing.
try:
    nlp = spacy.load("de_core_news_sm")
except OSError:
    # check=True: fail loudly if the download itself fails, instead of
    # hitting a second, more confusing OSError on the retry below.
    subprocess.run(
        [sys.executable, "-m", "spacy", "download", "de_core_news_sm"],
        check=True,
    )
    nlp = spacy.load("de_core_news_sm")
# Sentiment model; transformers caches the weights locally after the
# first download.
_SENTIMENT_MODEL = "oliverguhr/german-sentiment-bert"
sentiment_analyzer = pipeline("sentiment-analysis", model=_SENTIMENT_MODEL)
def link_entities_with_sentiment(text):
    """Link named entities to the sentiment of their containing sentence.

    Splits *text* into sentences with spaCy, runs the German sentiment
    model on each sentence that actually contains entities, and pairs
    every entity with that sentence's sentiment label and score.

    Parameters
    ----------
    text : str
        German input text.

    Returns
    -------
    pandas.DataFrame or str
        One row per entity (Entity, Label, Sentence Index, Sentiment
        Label, Sentiment Score), or a German "no entities found" message
        when the text contains none — kept as-is for backward
        compatibility with the UI wiring.
    """
    doc = nlp(text)
    rows = []
    for i, sentence in enumerate(doc.sents):
        entities = [(ent.text, ent.label_) for ent in sentence.ents]
        if not entities:
            # Skip the (expensive) sentiment model call for sentences
            # that contribute no output rows anyway.
            continue
        sentiment = sentiment_analyzer(sentence.text)[0]
        for ent_text, ent_label in entities:
            rows.append({
                "Entity": ent_text,
                "Label": ent_label,
                "Sentence Index": i,
                "Sentiment Label": sentiment["label"],
                "Sentiment Score": round(sentiment["score"], 3),
            })
    df = pd.DataFrame(rows)
    return df if not df.empty else "Keine Entitäten gefunden."
# Gradio UI: free-text input, tabular output of entity/sentiment pairs.
_DESCRIPTION = (
    "Diese Demo verknüpft erkannte Entitäten mit Sentiment-Labels "
    "aus dem gleichen Satz."
)
demo = gr.Interface(
    fn=link_entities_with_sentiment,
    inputs=gr.Textbox(lines=10, label="Gib deinen deutschen Text ein"),
    outputs=gr.Dataframe(label="Entitäten mit Sentiment"),
    title="NER + Sentiment Analyse (Deutsch)",
    description=_DESCRIPTION,
    allow_flagging="manual",
)
# Script entry point: launch the Gradio app (the stray " |" scrape
# residue after demo.launch() was a syntax error and has been removed).
if __name__ == "__main__":
    demo.launch()