andreska committed on
Commit
13992cf
·
verified ·
1 Parent(s): 2b41910

Updated to new test-model

Browse files
Files changed (1) hide show
  1. app.py +3 -11
app.py CHANGED
@@ -1,6 +1,5 @@
1
  import streamlit as st
2
- #from transformers import pipeline
3
- from transformers import AutoProcessor, UdopForConditionalGeneration
4
  import docx
5
  #from datasets import load_dataset
6
 
@@ -12,12 +11,9 @@ def read_docx(file_path):
12
  text.append(paragraph.text)
13
  return "\n".join(text)
14
 
15
- # Load model and processor
16
- processor = AutoProcessor.from_pretrained("microsoft/udop-large", apply_ocr=False)
17
- model = UdopForConditionalGeneration.from_pretrained("microsoft/udop-large")
18
-
19
  #pipe = pipeline("question-answering")
20
  #pipe = pipeline("text-generation")
 
21
 
22
  st.title("Adrega AI Help")
23
  #dataset = load_dataset("andreska/adregadocs", split="test")
@@ -31,11 +27,7 @@ if st.button("Submit"):
31
  #text_inputs = f"Context: {context}\nQuestion: {user_input}\nAnswer:"
32
  #result = pipe(text_inputs, max_length=200, num_return_sequences=1)[0]['generated_text']
33
  #answer = result.split("Answer:")[1].strip()
34
- encoding = processor(context, user_input, return_tensors="pt")
35
-
36
- #answer = pipe(question=user_input, context=context)
37
- predicted_ids = model.generate(**encoding)
38
- answer = processor.batch_decode(predicted_ids, skip_special_tokens=True)[0]
39
 
40
  st.write(f"Adrega AI: {answer}")
41
  else:
 
1
  import streamlit as st
2
+ from transformers import pipeline
 
3
  import docx
4
  #from datasets import load_dataset
5
 
 
11
  text.append(paragraph.text)
12
  return "\n".join(text)
13
 
 
 
 
 
14
  #pipe = pipeline("question-answering")
15
  #pipe = pipeline("text-generation")
16
+ pipe = pipeline("question-answering", model="deepset/roberta-base-squad2")
17
 
18
  st.title("Adrega AI Help")
19
  #dataset = load_dataset("andreska/adregadocs", split="test")
 
27
  #text_inputs = f"Context: {context}\nQuestion: {user_input}\nAnswer:"
28
  #result = pipe(text_inputs, max_length=200, num_return_sequences=1)[0]['generated_text']
29
  #answer = result.split("Answer:")[1].strip()
30
+ answer = pipe(question=user_input, context=context)
 
 
 
 
31
 
32
  st.write(f"Adrega AI: {answer}")
33
  else: