ckharche commited on
Commit
cc814c9
·
verified ·
1 Parent(s): 1a0a985

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +34 -16
app.py CHANGED
@@ -6,14 +6,14 @@ import textstat
6
 
7
  @st.cache_resource
8
  def load_model():
9
- # Load to CPU explicitly
10
  base = BartForConditionalGeneration.from_pretrained(
11
  "facebook/bart-large-cnn",
12
  torch_dtype=torch.float32,
13
- device_map=None # Don't use auto
14
  )
15
 
16
- model = PeftModel.from_pretrained(base, "ckharche/Legaleaze")
 
17
  tokenizer = BartTokenizer.from_pretrained("facebook/bart-large-cnn")
18
 
19
  model.to("cpu")
@@ -29,18 +29,36 @@ def simplify(text, tokenizer, model):
29
 
30
  return tokenizer.decode(outputs[0], skip_special_tokens=True)
31
 
32
- # Rest of your UI code...
33
- st.title("Legaleaze")
34
- tokenizer, model = load_model()
35
 
36
- col1, col2 = st.columns(2)
37
- with col1:
38
- text = st.text_area("Complex Legal Text", height=300)
39
- if st.button("Simplify"):
40
- with st.spinner("Processing (20-30s on CPU)..."):
41
- result = simplify(text, tokenizer, model)
42
- st.session_state['result'] = result
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
43
 
44
- with col2:
45
- if 'result' in st.session_state:
46
- st.text_area("Simplified", st.session_state['result'], height=300)
 
6
 
7
  @st.cache_resource
8
  def load_model():
 
9
  base = BartForConditionalGeneration.from_pretrained(
10
  "facebook/bart-large-cnn",
11
  torch_dtype=torch.float32,
12
+ device_map=None
13
  )
14
 
15
+ # Load from local checkpoint folder
16
+ model = PeftModel.from_pretrained(base, "./checkpoint")
17
  tokenizer = BartTokenizer.from_pretrained("facebook/bart-large-cnn")
18
 
19
  model.to("cpu")
 
29
 
30
  return tokenizer.decode(outputs[0], skip_special_tokens=True)
31
 
32
# --- Streamlit UI ------------------------------------------------------------
# Two-column layout: raw legal text on the left, simplified output on the
# right, with Flesch-Kincaid grade-level (FKGL) metrics under the result.
st.set_page_config(page_title="Legaleaze", layout="wide")
st.title("⚖️ Legaleaze: Legal Text Simplifier")  # fixed mojibake ("âš–ï¸" -> "⚖️")
st.caption("BART-Large + LoRA | 121k steps on asylum cases")

try:
    tokenizer, model = load_model()

    col1, col2 = st.columns(2)

    with col1:
        st.subheader("Complex Legal Text")
        # Real label, visually collapsed: avoids Streamlit's empty-label warning
        # while keeping the rendered layout identical.
        text = st.text_area(
            "Complex legal text input",
            height=300,
            placeholder="Paste legal text here...",
            label_visibility="collapsed",
        )
        btn = st.button("Simplify", type="primary", use_container_width=True)

    with col2:
        st.subheader("Simplified Output")
        if btn and text.strip():
            with st.spinner("Simplifying (30s on CPU)..."):
                result = simplify(text, tokenizer, model)
            st.text_area(
                "Simplified output",
                value=result,
                height=300,
                disabled=True,
                label_visibility="collapsed",
            )

            st.divider()
            m1, m2, m3 = st.columns(3)
            orig = textstat.flesch_kincaid_grade(text)
            simp = textstat.flesch_kincaid_grade(result)
            m1.metric("Original FKGL", f"{orig:.1f}")
            m2.metric("Simplified FKGL", f"{simp:.1f}")
            # Guard: flesch_kincaid_grade can legitimately return 0 for very
            # short/simple text, which previously raised ZeroDivisionError.
            if orig:
                m3.metric("Improvement", f"{((orig - simp) / orig * 100):.0f}%")
            else:
                m3.metric("Improvement", "n/a")
        else:
            st.info("Simplified text appears here")

except Exception as e:
    # Broad catch is deliberate at this top-level UI boundary: surface any
    # model-loading or inference failure to the user instead of a blank page.
    st.error(f"Error: {e}")