dindizz committed on
Commit
146f8eb
·
verified ·
1 Parent(s): 9aecc63

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +77 -77
app.py CHANGED
@@ -1,77 +1,77 @@
1
- import requests
2
- from bs4 import BeautifulSoup
3
- import openai
4
- import gradio as gr
5
- import os
6
- from dotenv import load_dotenv
7
-
8
- # Load environment variables from .env file
9
- load_dotenv()
10
- openai.api_key = os.getenv("OPENAI_API_KEY")
11
-
12
-
13
- # Function to scrape content from a URL
14
- def scrape_content(url):
15
- response = requests.get(url)
16
- soup = BeautifulSoup(response.content, 'html.parser')
17
-
18
- # Example of extracting title and body content - modify based on actual structure of the websites
19
- title = soup.find('title').get_text()
20
- paragraphs = soup.find_all('p')
21
- content = '\n'.join([para.get_text() for para in paragraphs])
22
-
23
- return title, content
24
-
25
-
26
- # Function to create newsletter using OpenAI
27
- def create_newsletter(contents):
28
- prompt = "Create a newsletter with the following content:\n\n"
29
- for content in contents:
30
- title, body, url = content
31
- prompt += f"Title: {title}\nURL: {url}\n\n{body}\n\n"
32
-
33
- response = openai.chat.completions.create(
34
- model="gpt-4",
35
- messages=[
36
- {"role": "system", "content": "You are a helpful assistant."},
37
- {"role": "user", "content": prompt}
38
- ]
39
- )
40
-
41
- # newsletter = response['choices'][0]['message']['content'].strip()
42
- newsletter = response.choices[0].message.content.strip()
43
- return newsletter
44
-
45
-
46
- # Function to process URLs and generate the newsletter
47
- def process_urls(url1, url2, url3, url4, url5):
48
- urls = [url for url in [url1, url2, url3, url4, url5] if url]
49
-
50
- if not urls:
51
- return "No URLs provided."
52
-
53
- contents = []
54
- for url in urls:
55
- title, content = scrape_content(url)
56
- contents.append((title, content, url))
57
-
58
- newsletter = create_newsletter(contents)
59
- return newsletter
60
-
61
-
62
- # Gradio interface
63
- iface = gr.Interface(
64
- fn=process_urls,
65
- inputs=[
66
- gr.Textbox(label="URL 1"),
67
- gr.Textbox(label="URL 2"),
68
- gr.Textbox(label="URL 3"),
69
- gr.Textbox(label="URL 4"),
70
- gr.Textbox(label="URL 5")
71
- ],
72
- outputs="html",
73
- title="Newsletter Generator",
74
- description="Enter up to 5 URLs to generate a newsletter."
75
- )
76
-
77
- iface.launch()
 
1
# --- Dependencies ---
import requests                  # HTTP fetching of article pages
from bs4 import BeautifulSoup    # HTML parsing of the fetched pages
import openai                    # chat-completions client used to write the newsletter
import gradio as gr              # web UI
import os
from dotenv import load_dotenv

# Load environment variables from .env file
load_dotenv()
# NOTE(review): assumes OPENAI_API_KEY is present in the environment/.env;
# if it is missing this silently sets the key to None — verify at deploy time.
openai.api_key = os.getenv("OPENAI_API_KEY")
11
+
12
+
13
# Function to scrape content from a URL
def scrape_content(url):
    """Fetch *url* and return a ``(title, content)`` pair.

    ``title`` is the page's <title> text (falls back to the URL itself when
    the page has no <title> tag); ``content`` is the text of all <p> tags
    joined with newlines.

    Raises requests.RequestException on network failure or non-2xx status.
    """
    # Bound the request so one hung server cannot stall the whole app forever.
    response = requests.get(url, timeout=10)
    # Surface HTTP errors instead of silently parsing an error page.
    response.raise_for_status()
    soup = BeautifulSoup(response.content, 'html.parser')

    # Example of extracting title and body content - modify based on actual
    # structure of the websites.
    title_tag = soup.find('title')
    # Pages without a <title> would otherwise raise AttributeError on .get_text().
    title = title_tag.get_text() if title_tag is not None else url
    paragraphs = soup.find_all('p')
    content = '\n'.join(para.get_text() for para in paragraphs)

    return title, content
24
+
25
+
26
# Function to create newsletter using OpenAI
def create_newsletter(contents, model="gpt-4o"):
    """Generate a newsletter from scraped articles via the OpenAI chat API.

    Parameters
    ----------
    contents : iterable of (title, body, url) tuples
        One entry per scraped page, as produced by ``process_urls``.
    model : str, optional
        Chat model name (default ``"gpt-4o"``). Parameterized so future
        model bumps don't require editing this function again.

    Returns
    -------
    str
        The generated newsletter text, stripped of surrounding whitespace.
    """
    # Assemble one prompt containing every article, joined at C speed
    # instead of repeated string concatenation.
    sections = [
        f"Title: {title}\nURL: {url}\n\n{body}\n\n"
        for title, body, url in contents
    ]
    prompt = "Create a newsletter with the following content:\n\n" + "".join(sections)

    response = openai.chat.completions.create(
        model=model,
        messages=[
            {"role": "system", "content": "You are a helpful assistant."},
            {"role": "user", "content": prompt}
        ]
    )

    # openai>=1.0 returns typed objects (attribute access), not dicts.
    newsletter = response.choices[0].message.content.strip()
    return newsletter
44
+
45
+
46
# Function to process URLs and generate the newsletter
def process_urls(url1, url2, url3, url4, url5):
    """Scrape every non-empty URL field and return the generated newsletter.

    Returns the notice string "No URLs provided." when all five fields
    are blank.
    """
    # Drop blank textbox entries.
    urls = [u for u in (url1, url2, url3, url4, url5) if u]
    if not urls:
        return "No URLs provided."

    # Pair each scrape result with its source URL: (title, content, url).
    contents = []
    for u in urls:
        page_title, page_text = scrape_content(u)
        contents.append((page_title, page_text, u))

    return create_newsletter(contents)
60
+
61
+
62
# Gradio interface
# Five optional URL fields feed process_urls; the result renders as HTML.
url_boxes = [gr.Textbox(label=f"URL {i}") for i in range(1, 6)]

iface = gr.Interface(
    fn=process_urls,
    inputs=url_boxes,
    outputs="html",
    title="Newsletter Generator",
    description="Enter up to 5 URLs to generate a newsletter.",
)

iface.launch()