bonrix committed on
Commit
714b4d4
·
1 Parent(s): e386792

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +3 -3
app.py CHANGED
@@ -192,14 +192,14 @@ def gradio_interface1(sitemap_file):
192
 
193
 
194
  with gr.Blocks() as demo:
195
- gr.Markdown("A website URL is entered into a web crawling tool, which navigates through the site's pages and extracts text content from each page. This process enables users to gather information from multiple web pages quickly and efficiently, facilitating data analysis, research, or content extraction for various purposes.")
196
- with gr.Tab("Website Crawler"):
197
  text_input1 = gr.inputs.Textbox()
198
  progress_output = gr.outputs.Textbox(label="Progress")
199
  file_output1 = gr.outputs.File(label="Download Text")
200
  button1 = gr.Button("Website Crawler")
201
 
202
- with gr.Tab("Website Crawler"):
203
  text_input2 = gr.inputs.Textbox()
204
  file_output2 = gr.outputs.File(label="Download HTML File")
205
  button2 = gr.Button("Website Crawler")
 
192
 
193
 
194
  with gr.Blocks() as demo:
195
+ gr.Markdown("A website URL is entered into a web crawling tool, which navigates through the site's pages and extracts text content from each page. This process enables users to gather information from multiple web pages quickly and efficiently, facilitating data analysis, research, or content extraction for various purposes.First website crawler generates the Text file and another website crawler is generates a HTML File, Once the server responds, the crawling tool fetches the HTML content of the webpage. The HTML is then parsed to extract the structured information present in the page's elements, such as headings, paragraphs, links, images, etc.Overall, web crawling is a valuable technique for data acquisition, empowering individuals and businesses to harness the vast knowledge available on the internet for their specific needs and objectives. However, it is essential to conduct web crawling responsibly, adhering to ethical practices and respecting the terms of service of the websites being crawled to maintain a harmonious and respectful relationship between web crawlers and website owners.")
196
+ with gr.Tab("Website Crawler(To generate a text file)"):
197
  text_input1 = gr.inputs.Textbox()
198
  progress_output = gr.outputs.Textbox(label="Progress")
199
  file_output1 = gr.outputs.File(label="Download Text")
200
  button1 = gr.Button("Website Crawler")
201
 
202
+ with gr.Tab("Website Crawler(To generate a HTML file)"):
203
  text_input2 = gr.inputs.Textbox()
204
  file_output2 = gr.outputs.File(label="Download HTML File")
205
  button2 = gr.Button("Website Crawler")