File size: 1,218 Bytes
75e2f70
07b84ab
 
0dccfd6
75e2f70
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
4c29a7f
75e2f70
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
import wikipedia
import gradio as gr
from gradio.mix import Parallel
import requests

def wikipediaScrap(article_name, wikipedia_language = "en"):
  """Fetch a Wikipedia article and return its basic parts.

  Parameters
  ----------
  article_name : str
      Title of the article to look up.
  wikipedia_language : str, optional
      Language code for the Wikipedia edition (default "en"). An empty
      string skips the set_lang call and keeps the library's current language.

  Returns
  -------
  tuple
      (title, full page content, page URL, newline-joined linked page titles).

  Raises
  ------
  wikipedia.exceptions.PageError / DisambiguationError
      Propagated from wikipedia.page() for missing or ambiguous titles.
  """
  if wikipedia_language:
    wikipedia.set_lang(wikipedia_language)

  et_page = wikipedia.page(article_name)
  title = et_page.title
  content = et_page.content
  page_url = et_page.url
  linked_pages = et_page.links

  # Links are returned as one newline-separated string so the caller can
  # display them directly in a text widget.
  return title, content, page_url, "\n".join(linked_pages)
  
def answer(text):
  """Send the first paragraphs of a Wikipedia article to a summarizer API.

  Parameters
  ----------
  text : str
      Article title passed straight to wikipediaScrap (default language "en").

  Returns
  -------
  list
      One API answer string per qualifying paragraph (possibly empty).
  """
  title, content, page_url, link = wikipediaScrap(text)
  # Drop empty chunks produced by consecutive blank lines in the content.
  result = list(filter(lambda x: x != '', content.split('\n\n')))
  answers = []
  # Only the first two paragraphs are considered; slicing (instead of
  # result[i] for i in range(2)) avoids an IndexError on short articles.
  for paragraph in result[:2]:
    # Skip very short paragraphs — too little context for the summarizer.
    if len(paragraph) > 100:
      r = requests.post(
          url="https://hf.space/embed/jaimin/article-qa-summarizer/+/api/predict",
          json={"data": [paragraph]},
      )
      response = r.json()
      answers.append(response['data'][0])
  return answers
   
# Wire the answer() function to a simple Gradio UI: a 5-line text box for
# the article name in, plain text out, served immediately on launch.
iface = gr.Interface(
    fn=answer,
    inputs=[gr.inputs.Textbox(lines=5)],
    outputs="text",
)
iface.launch()