Use session for profile update

jeancf 2023-10-31 15:24:14 +01:00
parent 123bf33faf
commit 7e92516f3b


@@ -155,8 +155,22 @@ def main(argv):
# Select random nitter instance to fetch updates from
nitter_url = 'https://' + TOML['options']['nitter_instances'][random.randint(0, len(TOML['options']['nitter_instances']) - 1)]
+# Initiate session
+session = requests.Session()
+# Get a copy of the default headers that requests would use
+headers = requests.utils.default_headers()
+# Update default headers with randomly selected user agent
+headers.update(
+    {
+        'User-Agent': USER_AGENTS[random.randint(0, len(USER_AGENTS) - 1)],
+        'Cookie': 'replaceTwitter=; replaceYouTube=; hlsPlayback=on; proxyVideos=',
+    }
+)
# Load twitter page of user
-soup, timeline = get_timeline(nitter_url)
+soup, timeline = get_timeline(session, nitter_url)
logging.info('Processing ' + str(len(timeline)) + ' tweets found in timeline')
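Note: the hunk above moves the session setup into main() so that one requests.Session is created per run and then handed to the fetch functions. A minimal standalone sketch of the same pattern; the user-agent string, the example Nitter URL and the timeout value below are placeholders, not the project's actual values:

import random
import requests

# Placeholder stand-ins for the module-level constants referenced in the diff
USER_AGENTS = ['Mozilla/5.0 (X11; Linux x86_64; rv:109.0) Gecko/20100101 Firefox/115.0']
HTTPS_REQ_TIMEOUT = 10

# One session per run, reused for every request
session = requests.Session()

# Start from the headers requests would send by default
headers = requests.utils.default_headers()

# Add a randomly selected user agent and the Nitter preference cookie
headers.update(
    {
        'User-Agent': USER_AGENTS[random.randint(0, len(USER_AGENTS) - 1)],
        'Cookie': 'replaceTwitter=; replaceYouTube=; hlsPlayback=on; proxyVideos=',
    }
)

# Example request through the shared session (placeholder URL)
page = session.get('https://nitter.example.com/jack', headers=headers, timeout=HTTPS_REQ_TIMEOUT)
print(page.status_code)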
@@ -349,7 +363,7 @@ def main(argv):
mastodon = None
# Update profile if it has changed
-mastodon = update_profile(nitter_url, soup, sql, mast_password)
+mastodon = update_profile(session, nitter_url, soup, sql, mast_password)
# Login to account on Mastodon instance
if len(tweets) != 0 and mastodon is None:
@@ -628,10 +642,11 @@ def build_config(args):
exit(-1)
-def get_timeline(nitter_url):
+def get_timeline(session, nitter_url):
"""
Download timeline of twitter account
-:param url: url of the account page to download
+:param session: configured requests session including user agent
+:param nitter_url: url of the account page to download
:return: list of tuples with url of tweet replied-to (or None) and content of tweet
"""
# Define url to use
@@ -641,20 +656,6 @@ def get_timeline(nitter_url):
if TOML['options']['post_reply_to']:
url += '/with_replies'
-# Initiate session
-session = requests.Session()
-# Get a copy of the default headers that requests would use
-headers = requests.utils.default_headers()
-# Update default headers with randomly selected user agent
-headers.update(
-    {
-        'User-Agent': USER_AGENTS[random.randint(0, len(USER_AGENTS) - 1)],
-        'Cookie': 'replaceTwitter=; replaceYouTube=; hlsPlayback=on; proxyVideos=',
-    }
-)
# Download twitter page of user
try:
twit_account_page = session.get(url, headers=headers, timeout=HTTPS_REQ_TIMEOUT)
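Note: with this hunk the per-call session setup disappears from get_timeline(); the function now relies entirely on the session passed in from main(). Because a requests.Session keeps a connection pool, later calls against the same Nitter host can reuse the TCP/TLS connection opened by the first one. A small sketch of that reuse, with placeholder URLs and timeout:

import requests

session = requests.Session()

# Both requests go through the same session, so the second can reuse the
# connection (and any cookies) established by the first.
timeline_page = session.get('https://nitter.example.com/jack', timeout=10)
avatar_image = session.get('https://nitter.example.com/pic/avatar.jpg', timeout=10)
print(timeline_page.status_code, avatar_image.status_code)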
@@ -923,11 +924,12 @@ def process_attachments(nitter_url, attachments_container, status_id, author_acc
return pics, vid_in_tweet
-def update_profile(nitter_url, soup, sql, mast_password):
+def update_profile(session, nitter_url, soup, sql, mast_password):
"""
Update profile on Mastodon
Check if avatar or banner pictures have changed since last run
If they have, download them and upload them on the Mastodon account profile
+:param session: Configured requests session including user agent
:param nitter_url: url of the Nitter instance that is being used
:param soup: BeautifulSoup object containing the page
:param sql: database connection
@@ -989,7 +991,7 @@ def update_profile(nitter_url, soup, sql, mast_password):
new_banner_mime = None
# Download images
-new_avatar = requests.get(nitter_url + new_avatar_url, timeout=HTTPS_REQ_TIMEOUT) if new_avatar_url is not None else None
+new_avatar = session.get(nitter_url + new_avatar_url, timeout=HTTPS_REQ_TIMEOUT) if new_avatar_url is not None else None
if new_avatar is not None:
new_avatar_img = new_avatar.content if new_avatar.status_code == 200 else None
new_avatar_mime = new_avatar.headers['content-type'] if new_avatar.status_code == 200 else None
@@ -999,7 +1001,7 @@ def update_profile(nitter_url, soup, sql, mast_password):
else:
logging.debug("Avatar image downloaded")
-new_banner = requests.get(nitter_url + new_banner_url, timeout=HTTPS_REQ_TIMEOUT) if new_banner_url is not None else None
+new_banner = session.get(nitter_url + new_banner_url, timeout=HTTPS_REQ_TIMEOUT) if new_banner_url is not None else None
if new_banner is not None:
new_banner_img = new_banner.content if new_banner.status_code == 200 else None
new_banner_mime = new_banner.headers['content-type'] if new_banner.status_code == 200 else None
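Note: both changed lines in the last two hunks follow the same pattern: fetch through the shared session only when a URL was found on the page, then keep the image bytes and MIME type only on HTTP 200. A hedged sketch of that pattern pulled out into a helper; the helper name, URLs and timeout are illustrative and not part of the project:

import requests

HTTPS_REQ_TIMEOUT = 10  # assumption: mirrors the module-level constant used in the diff

def fetch_image(session, base_url, path):
    # Illustrative helper (not in twoot): conditional download with status check
    if path is None:
        return None, None
    response = session.get(base_url + path, timeout=HTTPS_REQ_TIMEOUT)
    if response.status_code != 200:
        return None, None
    return response.content, response.headers['content-type']

session = requests.Session()
avatar_img, avatar_mime = fetch_image(session, 'https://nitter.example.com', '/pic/avatar.jpg')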