Merge pull request #276 from royninja/master

Email Extractor
Authored by Advaita Saha on 2022-10-09 23:07:31 +05:30; committed by GitHub
commit 3b7fd18cee
3 changed files with 67 additions and 0 deletions


@@ -0,0 +1,16 @@
# Email Extractor with Python
This is a script that takes a website as input and collects all the email addresses found on its pages into a CSV file.
### Setup
- Install the requirements (see below)
- Run the script with `python email_extractor.py`
- Enter the website to collect emails from when prompted
### Requirements
```pip install -r requirements.txt```
### Usage
```python email_extractor.py```
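### Example
A run looks roughly like this; the site name below is purely illustrative, and the addresses collected depend entirely on the site you enter:
```
$ python email_extractor.py
Enter Website url (i.e.: example.com): example.com
```
The collected addresses are written to `email.csv` in the working directory, in a single `Email` column.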


@@ -0,0 +1,45 @@
import urllib.request
from urllib.parse import urljoin

import pandas as pd
import requests
from bs4 import BeautifulSoup
from email_scraper import scrape_emails

urlid = input("Enter Website url (i.e.: example.com): ")
url = "https://" + urlid + "/"

# Fetch the landing page and collect every hyperlink on it
reqs = requests.get(url)
soup = BeautifulSoup(reqs.text, 'html.parser')

urls = []
for link in soup.find_all('a'):
    href = link.get('href')
    if href:  # skip anchors without an href attribute
        urls.append(href)

emails = set()
for href in urls:
    # Absolute links are fetched as-is; relative links are resolved against the base url
    target = href if href.startswith(("http://", "https://")) else urljoin(url, href)
    try:
        with urllib.request.urlopen(target) as fp:
            page = fp.read().decode("utf8", errors="ignore")
    except Exception:
        continue  # skip links that cannot be fetched or decoded
    emails.update(scrape_emails(page))

# Write the unique addresses to a CSV file
df = pd.DataFrame(sorted(emails), columns=["Email"])
df.to_csv('email.csv', index=False)

# files.download only exists inside Google Colab; skip the download when running locally
try:
    from google.colab import files
    files.download("email.csv")
except ImportError:
    pass
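The heavy lifting is done by `scrape_emails` from the `email_scraper` package listed in the requirements, which pulls addresses out of a plain-text string. A minimal sketch of that call in isolation, assuming the package is installed (the sample text and addresses are made up):
```
from email_scraper import scrape_emails

sample = "Write to info@example.com or support@example.com for details."
print(scrape_emails(sample))  # prints the addresses found in the text
```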


@@ -0,0 +1,6 @@
requests
beautifulsoup4
email-scraper
pandas