Shweta7171 committed on
Commit
0a9dbce
·
verified ·
1 Parent(s): 083a5d9

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +24 -19
app.py CHANGED
@@ -1,34 +1,39 @@
1
- # app.py
2
  import streamlit as st
3
- import utils
4
-
5
- import sys
6
- sys.path.append('/path/to/utils.py')
7
-
8
 
9
  def dataframe_to_csv_download(dataframe):
 
10
  csv = dataframe.to_csv(index=False)
 
 
 
 
 
 
 
 
 
 
11
  st.download_button(
12
  label="Download CSV File",
13
- data=csv,
14
- file_name="ScrappedData.csv",
15
  mime="text/csv",
16
  )
17
 
18
- def main():
 
 
19
  st.title('LinkedIn Job Scraper')
20
  st.write('This app scrapes LinkedIn for job listings.')
21
- url = st.text_input("Enter the URL here..")
22
 
23
  if st.button("Get Records") and url:
24
- with st.spinner("Scraping the given URL..."):
25
  scrap_df = utils.scrap_data(url)
 
 
26
 
27
- if scrap_df is not None:
28
- st.dataframe(scrap_df)
29
- dataframe_to_csv_download(scrap_df)
30
- else:
31
- st.error("Failed to retrieve data from the URL. Please check the URL and try again.")
32
-
33
- if __name__ == '__main__':
34
- main()
 
1
+ import utils as utils
2
  import streamlit as st
3
+ from io import StringIO
4
+ from datetime import datetime, date
 
 
 
5
 
6
def dataframe_to_csv_download(dataframe):
    """Render a Streamlit download button offering *dataframe* as a CSV file.

    Parameters
    ----------
    dataframe : pandas.DataFrame
        The scraped records to export (index is not written).

    The file name is timestamped so successive downloads do not overwrite
    each other.
    """
    # Convert the DataFrame to CSV text, then to UTF-8 bytes for the button.
    # (The intermediate StringIO buffer the previous revision built was never
    # read — st.download_button receives csv_bytes directly — so it is gone.)
    csv = dataframe.to_csv(index=False)
    csv_bytes = csv.encode('utf-8')

    # Use '-' instead of ':' in the time part: ':' is not a legal character
    # in Windows file names, so "…%H:%M.csv" could produce an unsaveable file.
    today = datetime.now().strftime("%Y-%m-%d_%H-%M")

    # Create a download button in the Streamlit app
    st.download_button(
        label="Download CSV File",
        data=csv_bytes,
        file_name=f"ScrappedData_{today}.csv",
        mime="text/csv",
    )
25
 
26
+
27
+
28
if __name__ == '__main__':
    # Streamlit page: prompt for a LinkedIn URL, scrape it, and offer the
    # results as a table plus a CSV download.
    st.title('LinkedIn Job Scraper')
    st.write('This app scrapes LinkedIn for job listings.')
    url = st.text_area("Enter the url here..")

    if st.button("Get Records") and url:
        with st.spinner("Scrapping the given URL"):
            scrap_df = utils.scrap_data(url)

        # Guard against a failed scrape — utils.scrap_data can come back
        # empty/None, and st.dataframe(None) would just render nothing
        # without telling the user why. (This check existed in the previous
        # revision and was dropped by this commit.)
        if scrap_df is not None:
            st.dataframe(scrap_df)
            dataframe_to_csv_download(scrap_df)
        else:
            st.error("Failed to retrieve data from the URL. Please check the URL and try again.")

    st.write('')