feat: dump tweets from a given username for data analysis

Shreeram 2023-10-12 10:43:19 +05:30
parent 8ec5ea8d32
commit 474c4ebb79
3 changed files with 100 additions and 0 deletions

X_Scrapper/README.md Normal file

@@ -0,0 +1,22 @@
# X Scrapper
Use this to scrape tweets from a given username (including tweet metadata such as user location, views, likes, etc.) using `tweepy`.
## Use case
1. To analyze the [sentiment trend of a given user](https://github.com/iamshreeram/twitter-senti-analyzer) over a period of time (on a given topic or in general)
2. Further analysis of user behaviour using geo-location, time of tweets, etc.
### Requirements
- Python 3.x
- tweepy
```bash
pip install tweepy
```
### Usage
python x_scraper.py <twitter_username>
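The script writes the collected tweets to `<twitter_username>_tweets.csv` with the columns `id`, `created_at`, and `text`.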
### Note
1. This requires the consumer key, consumer secret, access key, and access secret from your x.com developer account.
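2. If you prefer not to hard-code the credentials, one option is to read them from environment variables before calling the script. A minimal sketch; the variable names below are illustrative, not part of the committed script:

```python
import os

# Hypothetical environment variable names; adjust to whatever you export.
consumer_key = os.environ.get("X_CONSUMER_KEY", "")
consumer_secret = os.environ.get("X_CONSUMER_SECRET", "")
access_key = os.environ.get("X_ACCESS_KEY", "")
access_secret = os.environ.get("X_ACCESS_SECRET", "")
```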


@@ -0,0 +1 @@
tweepy[async]

X_Scrapper/x_scraper.py Normal file

@@ -0,0 +1,77 @@
#!/usr/bin/env python
# encoding: utf-8
import csv
import sys

try:
    import tweepy  # https://github.com/tweepy/tweepy
except ImportError:
    print("You'll need tweepy installed on your system.")
    sys.exit(1)

# Credentials from your x.com developer account.
consumer_key = "xxx"
consumer_secret = "yyy"
access_key = "aa-zzzz"
access_secret = "bbb"


def get_all_tweets(screen_name):
    # Twitter only allows access to a user's most recent 3240 tweets with this method.
    if consumer_key in ("", "xxx"):
        print("You need to set up the script first. Edit it and add your keys.")
        return

    # Authorize with x.com and initialize tweepy.
    auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
    auth.set_access_token(access_key, access_secret)
    api = tweepy.API(auth)

    # Initialize a list to hold all the tweepy Tweets.
    alltweets = []

    # Make the initial request for the most recent tweets (200 is the maximum allowed count).
    new_tweets = api.user_timeline(screen_name=screen_name, count=200)

    # Save the most recent tweets.
    alltweets.extend(new_tweets)

    if not alltweets:
        print("No tweets found for %s" % screen_name)
        return

    # Save the id of the oldest tweet, less one.
    oldest = alltweets[-1].id - 1

    # Keep grabbing tweets until there are none left to grab.
    while len(new_tweets) > 0:
        print("getting tweets before %s" % oldest)

        # All subsequent requests use the max_id param to prevent duplicates.
        new_tweets = api.user_timeline(screen_name=screen_name, count=200, max_id=oldest)

        # Save the newly fetched tweets.
        alltweets.extend(new_tweets)

        # Update the id of the oldest tweet, less one.
        oldest = alltweets[-1].id - 1

        print("...%s tweets downloaded so far" % len(alltweets))

    # Transform the tweepy tweets into a 2D array that will populate the csv.
    outtweets = [[tweet.id_str, tweet.created_at, tweet.text] for tweet in alltweets]

    # Write the csv.
    with open('%s_tweets.csv' % screen_name, 'w', newline='', encoding='utf-8') as f:
        writer = csv.writer(f)
        writer.writerow(["id", "created_at", "text"])
        writer.writerows(outtweets)


if __name__ == '__main__':
    if len(sys.argv) == 2:
        get_all_tweets(sys.argv[1])
    else:
        print("Please add the x account you want to back up as an argument.")