import wikipedia
import gradio as gr
from gradio.mix import Parallel  # NOTE(review): unused here — kept in case another entry point relies on it
import requests


def wikipediaScrap(article_name, wikipedia_language="en"):
    """Fetch a Wikipedia article and return its core fields.

    Parameters
    ----------
    article_name : str
        Title (or search term) of the article to fetch.
    wikipedia_language : str, optional
        Two-letter Wikipedia language code; applied via
        ``wikipedia.set_lang`` when truthy. Defaults to ``"en"``.

    Returns
    -------
    tuple[str, str, str, str]
        ``(title, content, page_url, linked_pages)`` where
        ``linked_pages`` is the article's link titles joined by newlines.

    Raises
    ------
    wikipedia.exceptions.PageError, wikipedia.exceptions.DisambiguationError
        Propagated from ``wikipedia.page`` for missing/ambiguous titles.
    """
    if wikipedia_language:
        wikipedia.set_lang(wikipedia_language)
    page = wikipedia.page(article_name)
    # Join link titles into one newline-separated string for text output.
    return page.title, page.content, page.url, "\n".join(page.links)


def answer(text):
    """Summarize the first (up to two) long paragraphs of a Wikipedia article.

    Fetches the article named by *text*, splits its content into non-empty
    paragraphs, and sends each of the first two paragraphs longer than 100
    characters to a hosted summarizer Space.

    Parameters
    ----------
    text : str
        Article title to look up.

    Returns
    -------
    list[str]
        One summary string per qualifying paragraph (possibly empty).
    """
    _title, content, _page_url, _links = wikipediaScrap(text)
    paragraphs = [p for p in content.split("\n\n") if p]

    summaries = []
    # Guard with min(...) — short articles may have fewer than 2 paragraphs,
    # which previously raised IndexError on result[i].
    for paragraph in paragraphs[: min(2, len(paragraphs))]:
        if len(paragraph) > 100:
            r = requests.post(
                url="https://hf.space/embed/jaimin/article-qa-summarizer/+/api/predict",
                json={"data": [paragraph]},
                timeout=60,  # avoid hanging indefinitely on a stalled Space
            )
            response = r.json()
            summaries.append(response["data"][0])
    return summaries


# Module-level launch preserved: running this file starts the Gradio UI.
iface = gr.Interface(fn=answer, inputs=[gr.inputs.Textbox(lines=5)], outputs="text")
iface.launch()