shreyasiv committed
Commit
0f077c5
1 Parent(s): cbb9c8b

Upload 2 files

Files changed (2):
  1. app.py +56 -0
  2. requirements.txt +60 -0
app.py ADDED
@@ -0,0 +1,56 @@
+ import streamlit as st
+ import requests
+ from bs4 import BeautifulSoup
+ import re
+
+ # Function to scrape only visible text from the given URL
+ def scrape_visible_text_from_url(url):
+     try:
+         response = requests.get(url)
+         response.raise_for_status()
+         soup = BeautifulSoup(response.content, 'html.parser')
+
+         # Capture the header text before non-visible tags are stripped below
+         header_content = soup.find("header")
+         header_text = header_content.get_text() if header_content else ""
+
+         # Remove script, style, and other non-visible tags
+         for tag in soup(["script", "style", "meta", "link", "noscript", "header", "footer", "aside", "nav", "img"]):
+             tag.extract()
+
+         # Get the paragraph content
+         paragraph_content = soup.find_all("p")
+         paragraph_text = " ".join([p.get_text() for p in paragraph_content])
+
+         # Combine header and paragraph text
+         visible_text = f"{header_text}\n\n{paragraph_text}"
+
+         # Collapse runs of whitespace and newlines into single spaces
+         visible_text = re.sub(r'\s+', ' ', visible_text)
+         return visible_text.strip()
+     except Exception as e:
+         st.error(f"Error occurred while scraping the data: {e}")
+         return None
+
+ # Streamlit UI
+ def main():
+     st.title("Web Data Scraper")
+
+     # Get the URL from the user
+     url_input = st.text_input("Enter the URL of the web page:", "")
+
+     if st.button("Scrape Visible Text"):
+         if url_input:
+             # Extract visible text from the URL
+             data = scrape_visible_text_from_url(url_input)
+             if data:
+                 st.success("Visible text successfully scraped!")
+                 st.subheader("Scraped Text:")
+                 st.write(data)
+             else:
+                 st.warning("Failed to scrape visible text from the URL.")
+         else:
+             st.warning("Please enter a valid URL.")
+
+ if __name__ == "__main__":
+     main()
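
The app is launched locally with `streamlit run app.py`. For a quick check of the scraping helper outside the UI, a minimal sketch (assuming app.py is on the import path; the URL is only an example):

    # Smoke test for the helper; assumes app.py is importable from the
    # working directory and the example URL is reachable
    from app import scrape_visible_text_from_url

    text = scrape_visible_text_from_url("https://example.com")
    if text:
        print(text[:200])  # preview the first 200 characters of visible text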
requirements.txt ADDED
@@ -0,0 +1,60 @@
+ aiohttp==3.8.5
+ aiosignal==1.3.1
+ altair==5.0.1
+ async-timeout==4.0.2
+ attrs==23.1.0
+ beautifulsoup4==4.12.2
+ blinker==1.6.2
+ bs4==0.0.1
+ cachetools==5.3.1
+ certifi==2023.7.22
+ charset-normalizer==3.2.0
+ click==8.1.6
+ decorator==5.1.1
+ frozenlist==1.4.0
+ gitdb==4.0.10
+ GitPython==3.1.32
+ idna==3.4
+ importlib-metadata==6.8.0
+ Jinja2==3.1.2
+ jsonschema==4.18.4
+ jsonschema-specifications==2023.7.1
+ markdown-it-py==3.0.0
+ MarkupSafe==2.1.3
+ mdurl==0.1.2
+ multidict==6.0.4
+ numpy==1.25.2
+ openai==0.27.8
+ packaging==23.1
+ pandas==2.0.3
+ Pillow==9.5.0
+ protobuf==4.23.4
+ pyarrow==12.0.1
+ pydeck==0.8.0
+ Pygments==2.15.1
+ Pympler==1.0.1
+ python-dateutil==2.8.2
+ python-dotenv==1.0.0
+ pytz==2023.3
+ pytz-deprecation-shim==0.1.0.post0
+ referencing==0.30.0
+ requests==2.31.0
+ rich==13.5.2
+ rpds-py==0.9.2
+ six==1.16.0
+ smmap==5.0.0
+ soupsieve==2.4.1
+ streamlit==1.25.0
+ tenacity==8.2.2
+ toml==0.10.2
+ toolz==0.12.0
+ tornado==6.3.2
+ tqdm==4.65.0
+ typing_extensions==4.7.1
+ tzdata==2023.3
+ tzlocal==4.3.1
+ urllib3==2.0.4
+ validators==0.20.0
+ watchdog==3.0.0
+ yarl==1.9.2
+ zipp==3.16.2
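
Installing the pins with `pip install -r requirements.txt` reproduces the environment; most entries appear to be transitive dependencies of streamlit, while app.py itself imports only streamlit, requests, and bs4 (plus re from the standard library). A minimal post-install sanity check:

    # Confirm the direct dependencies of app.py resolved after installation
    import streamlit, requests, bs4
    print(streamlit.__version__, requests.__version__, bs4.__version__)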