alikhantoleberdyev committed on
Commit
1d4cee9
·
1 Parent(s): c706fc8

build version:1.8

Browse files
Files changed (2) hide show
  1. NLP_History_and_Facts.txt +1 -1
  2. app.py +22 -17
NLP_History_and_Facts.txt CHANGED
@@ -22,7 +22,7 @@ Natural Language Processing (NLP) has its roots in the early days of computing a
22
 
23
  5. **2000s - The Neural Era**:
24
  - The introduction of neural networks revolutionized NLP tasks.
25
- - Word embeddings, such as Word2Vec (2013) and GloVe, created dense, continuous vector representations of words, capturing semantic relationships.
26
 
27
  6. **2010s - The Transformer Revolution**:
28
  - The paper "Attention Is All You Need" (2017) introduced the Transformer architecture, which addressed the limitations of recurrent neural networks (RNNs).
 
22
 
23
  5. **2000s - The Neural Era**:
24
  - The introduction of neural networks revolutionized NLP tasks.
25
+ - Word embeddings, such as Word2Vec and GloVe, created dense, continuous vector representations of words, capturing semantic relationships.
26
 
27
  6. **2010s - The Transformer Revolution**:
28
  - The paper "Attention Is All You Need" (2017) introduced the Transformer architecture, which addressed the limitations of recurrent neural networks (RNNs).
app.py CHANGED
@@ -5,9 +5,11 @@ from transformers import AutoModelForSeq2SeqLM , AutoTokenizer, TranslationPipel
5
 
6
 
7
 
8
- st.title('NLP Question Answering πŸ•΅οΈβ€β™‚οΈ')
9
  st.write('Ask a question about NLP, and I will answer based on the provided context! πŸ”„')
10
- user_input = st.text_area("enter question about NLP", "What is tokenizer?")
 
 
11
  @st.cache_resource
12
  def load_model():
13
  print("Loading model...")
@@ -15,21 +17,24 @@ def load_model():
15
 
16
 
17
  dunno_answerer = load_model()
18
- with open('NLP_History_and_Facts.txt', 'r') as file:
19
- context = file.read()
20
-
21
- if st.button("Answer!"):
22
- if user_input.strip():
23
- # Generate an answer using the model
24
- result = dunno_answerer(question=user_input, context=context)
25
-
26
- # Display the answer and additional information
27
- st.write(f"**Answer:** {result['answer']}")
28
- st.write(f"**Confidence Score:** {round(result['score'], 4)}")
29
- st.write(f"**Answer Start Position:** {result['start']}")
30
- st.write(f"**Answer End Position:** {result['end']}")
31
- else:
32
- st.write("Please enter a valid question!")
 
 
 
33
 
34
  # x = st.slider('Select a value')
35
  # st.write(x, 'squared is', x * x)
 
5
 
6
 
7
 
8
+ st.title('Question Answering πŸ•΅οΈβ€β™‚οΈ')
9
  st.write('Ask a question about NLP, and I will answer based on the provided context! πŸ”„')
10
+ context_input = st.text_area("please provide some context", "Many NLP tasks are now benchmarked using datasets like GLUE and SuperGLUE. Multilingual NLP models like mBERT support multiple languages in a single framework.")
11
+
12
+ # question_input = st.text_area("enter question about NLP", "what model support multilingual nlp?")
13
  @st.cache_resource
14
  def load_model():
15
  print("Loading model...")
 
17
 
18
 
19
  dunno_answerer = load_model()
20
+ # with open('NLP_History_and_Facts.txt', 'r') as file:
21
+ # context = file.read()
22
+
23
+
24
+ if context_input.strip():
25
+ question_input = st.text_area("enter question about NLP", "what model support multilingual nlp?")
26
+ if st.button("Answer!"):
27
+ if question_input.strip():
28
+ # Generate an answer using the model
29
+ result = dunno_answerer(question=question_input, context=context_input)
30
+
31
+ # Display the answer and additional information
32
+ st.write(f"**Answer:** {result['answer']}")
33
+ st.write(f"**Confidence Score:** {round(result['score'], 4)}")
34
+ st.write(f"**Answer Start Position:** {result['start']}")
35
+ st.write(f"**Answer End Position:** {result['end']}")
36
+ else:
37
+ st.write("Please enter a valid question!")
38
 
39
  # x = st.slider('Select a value')
40
  # st.write(x, 'squared is', x * x)