Update app.py
Browse files
app.py
CHANGED
|
@@ -1,5 +1,6 @@
|
|
| 1 |
import streamlit as st
|
| 2 |
from PIL import Image
|
|
|
|
| 3 |
|
| 4 |
st.set_page_config(page_title="FACTOID: FACtual enTailment fOr hallucInation Detection", layout="wide")
|
| 5 |
st.title('Welcome to :blue[FACTOID] ')
|
|
@@ -33,4 +34,50 @@ label_names = ["support", "neutral", "refute"]
|
|
| 33 |
prediction = {name: round(float(pred) * 100, 1) for pred, name in zip(prediction, label_names)}
|
| 34 |
print(prediction)
|
| 35 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 36 |
st.write("Result:", prediction)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
import streamlit as st
|
| 2 |
from PIL import Image
|
| 3 |
+
import spacy
|
| 4 |
|
| 5 |
st.set_page_config(page_title="FACTOID: FACtual enTailment fOr hallucInation Detection", layout="wide")
|
| 6 |
st.title('Welcome to :blue[FACTOID] ')
|
|
|
|
| 34 |
prediction = {name: round(float(pred) * 100, 1) for pred, name in zip(prediction, label_names)}
|
| 35 |
print(prediction)
|
| 36 |
|
| 37 |
+
|
| 38 |
+
# Factual-entailment scoring: run the sentence pair through a cross-encoder
# NLI model and map the argmax logit to an entailment label.
from sentence_transformers import CrossEncoder

model1 = CrossEncoder('cross-encoder/nli-deberta-v3-xsmall')
# BUG FIX: the original called `model.predict(...)` immediately after binding
# the cross-encoder to `model1` — `model` is not this NLI model. Use `model1`.
scores1 = model1.predict([(sentence1, sentence2)])

# Convert scores to labels: index order is fixed by the nli-deberta-v3 model.
label_mapping = ['contradiction', 'entailment', 'neutral']
labels = [label_mapping[score_max] for score_max in scores1.argmax(axis=1)]
# NOTE(review): the original had a bare `labels` expression here (a no-op in a
# script, only meaningful in a notebook) — removed.
|
| 46 |
+
|
| 47 |
+
|
| 48 |
+
|
| 49 |
+
def extract_person_names(sentence):
    """
    Extract the first PERSON entity from a sentence using spaCy NER.

    Parameters:
        sentence (str): Input sentence.

    Returns:
        str | None: The first person name found, or None when the sentence
        contains no PERSON entity (the original raised IndexError here).
    """
    # PERF FIX: the original called spacy.load("en_core_web_sm") on every
    # invocation; cache the pipeline on the function object so the expensive
    # model load happens only once per process.
    nlp = getattr(extract_person_names, "_nlp", None)
    if nlp is None:
        nlp = spacy.load("en_core_web_sm")
        extract_person_names._nlp = nlp

    # Process the sentence and collect named entities tagged as PERSON.
    doc = nlp(sentence)
    person_names = [entity.text for entity in doc.ents if entity.label_ == 'PERSON']

    # ROBUSTNESS FIX: guard the empty case instead of indexing blindly.
    return person_names[0] if person_names else None
|
| 69 |
+
|
| 70 |
+
# Pull the primary person mentioned in each sentence for the comparison view.
person_name1 = extract_person_names(sentence1)
person_name2 = extract_person_names(sentence2)

st.write("Result:", prediction)

# BUG FIX: st.beta_columns was removed from Streamlit (deprecated in 0.86,
# deleted in 1.0) — st.columns is the stable replacement with the same API.
col1, col2 = st.columns(2)

with col1:
    # Plain classifier output (support/neutral/refute percentages).
    st.write("Without Factual Entailment:", prediction)

with col2:
    # Cross-encoder NLI verdict plus the person names being compared.
    st.write("Factual Entailment:", labels)
    st.write(f"{person_name1}::{person_name2}")
|
| 83 |
+
|