Mirror of https://github.com/cquest/tootbot.git, synced 2025-02-24 02:58:37 +00:00
Compare commits: f54ab85f18...e6ec0b20d7 (3 commits)

Commits:
  e6ec0b20d7
  6154b7d5a9
  735c495b41
tootbot.py (88 changed lines)
@@ -27,11 +27,11 @@ def unredir(redir):
                 r.headers.get('Location')
         else:
             redir = r.headers.get('Location')
-            print('redir', redir)
+            # print('redir', redir)
             if '//ow.ly/' in redir or '//bit.ly/' in redir:
                 redir = redir.replace('https://ow.ly/', 'http://ow.ly/') # only http
                 redir = requests.get(redir, allow_redirects=False).headers.get('Location')
-                print('redir+', redir)
+                # print('redir+', redir)
         try:
             r = requests.get(redir, allow_redirects=False, timeout=5)
         except:
@@ -223,20 +223,31 @@ else:
         c = html.unescape(t['tweet'])
         # do not toot twitter replies
         if 'reply_to' in t and len(t['reply_to'])>0:
-            print('Reply:',c)
+            # print('Reply:',c)
             continue
         # do not toot twitter quoted RT
         if 'quote_url' in t and t['quote_url'] != '':
-            print('Quoted:', c)
+            # print('Quoted:', c)
             continue

-        # check if this tweet has been processed
-        id = t['id']
-        db.execute('SELECT * FROM tweets WHERE tweet = ? AND twitter = ? and mastodon = ? and instance = ?', (id, source, mastodon, instance)) # noqa
-        last = db.fetchone()
+        # detect threads
+        in_reply_to = None
+        if 'conversation_id' in t and t['conversation_id'] not in t['link']:
+            db.execute('SELECT toot FROM tweets WHERE tweet = ? AND twitter = ?', (t['conversation_id'], source)) # noqa
+            thread = db.fetchone()
+            if thread:
+                print("Thread :", t['conversation_id'], t['link'], thread[0])
+                in_reply_to = thread[0]

-        # process only unprocessed tweets
-        if last:
+        # check if this tweet has been processed
+        id = t['id'] # old id
+        db.execute('SELECT * FROM tweets WHERE tweet = ? AND twitter = ? and mastodon = ? and instance = ?', (id, source, mastodon, instance)) # noqa
+        if db.fetchone():
+            continue
+
+        id = t['link'].split('/')[-1] # new id from status link to support threads
+        db.execute('SELECT * FROM tweets WHERE tweet = ? AND twitter = ? and mastodon = ? and instance = ?', (id, source, mastodon, instance)) # noqa
+        if db.fetchone():
             continue

         if c[-1] == "…":
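Read in isolation, the thread-detection change in this hunk works as follows: before building a toot, the bot looks up the tweet's conversation_id in its local SQLite table, and if the opening tweet of that conversation was already mirrored, it keeps the matching toot id as in_reply_to. Below is a minimal, self-contained sketch of that lookup; the column layout of the tweets table is taken from the queries in the diff, while the in-memory connection, the helper name find_in_reply_to and the sample ids are illustrative assumptions, not code from the repository.

import sqlite3

# Column layout as used by the queries in the diff:
# tweets(tweet, toot, twitter, mastodon, instance)
db = sqlite3.connect(':memory:')
db.execute('CREATE TABLE tweets (tweet TEXT, toot TEXT, twitter TEXT, mastodon TEXT, instance TEXT)')
# Pretend the first tweet of conversation 111 was already mirrored as toot 42 (invented ids).
db.execute("INSERT INTO tweets VALUES ('111', '42', 'some_account', 'bot', 'mastodon.example')")

def find_in_reply_to(t, source):
    """Return the toot id this tweet should reply to, or None (mirrors the diff's logic)."""
    in_reply_to = None
    if 'conversation_id' in t and t['conversation_id'] not in t['link']:
        cur = db.execute('SELECT toot FROM tweets WHERE tweet = ? AND twitter = ?',
                         (t['conversation_id'], source))
        thread = cur.fetchone()
        if thread:
            in_reply_to = thread[0]
    return in_reply_to

# A follow-up tweet of that conversation (invented sample data).
t = {'conversation_id': '111', 'link': 'https://twitter.com/some_account/status/222'}
print(find_in_reply_to(t, 'some_account'))  # prints: 42

The dedup queries later in the same hunk filter on the mastodon and instance columns as well, which is why the full table carries those two extra fields.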
@@ -254,21 +265,21 @@ else:

         if 'photos' in t:
             for url in t['photos']:
-                print('photo', url)
+                # print('photo', url)
                 try:
                     media = requests.get(url.replace(
                         'https://pbs.twimg.com/', 'https://nitter.net/pic/orig/'))
-                    print("received nitter", media.headers.get('content-type'))
+                    # print("received nitter", media.headers.get('content-type'))
                     media_posted = mastodon_api.media_post(
                         media.content, mime_type=media.headers.get('content-type'))
-                    print("posted")
+                    # print("posted")
                     toot_media.append(media_posted['id'])
                 except:
                     media = requests.get(url)
-                    print("received twitter", media.headers.get('content-type'))
+                    # print("received twitter", media.headers.get('content-type'))
                     media_posted = mastodon_api.media_post(
                         media.content, mime_type=media.headers.get('content-type'))
-                    print("posted")
+                    # print("posted")
                     toot_media.append(media_posted['id'])


@@ -286,23 +297,30 @@ else:
             if m is None:
                 c = c.replace(l, redir)
             else:
-                print('lien:',l)
-                c = c.replace(l, '')
                 video = redir
-                print('video:', video)
-                subprocess.run('rm -f out.mp4; yt-dlp -N 8 -o out.mp4 --recode-video mp4 --no-playlist %s --max-filesize 100M' %
-                               (video,), shell=True, capture_output=False)
-                print("received")
-                try:
-                    file = open("out.mp4", "rb")
-                    video_data = file.read()
-                    file.close()
-                    media_posted = mastodon_api.media_post(video_data, mime_type='video/mp4')
-                    c = c.replace(video, '')
-                    print("posted")
-                    toot_media.append(media_posted['id'])
-                except:
-                    pass
+                # print('video:', video)
+                video_json = subprocess.run('yt-dlp -s -j %s' %
+                                            (video,), shell=True, capture_output=True)
+                video_info = json.loads(video_json.stdout)
+                if video_info['duration'] < 600:
+                    # print('lien:', l)
+                    c = c.replace(l, '')
+                    subprocess.run('rm -f out.*; yt-dlp -N 8 -o out.mp4 --recode-video mp4 --no-playlist --max-filesize 100M %s' %
+                                   (video,), shell=True, capture_output=False)
+                    # print("received")
+                    try:
+                        file = open("out.mp4", "rb")
+                        video_data = file.read()
+                        file.close()
+                        media_posted = mastodon_api.media_post(video_data, mime_type='video/mp4')
+                        c = c.replace(video, '')
+                        # print("posted")
+                        toot_media.append(media_posted['id'])
+                        os.remove("out.mp4")
+                    except:
+                        pass
+                else:
+                    print("video duration > 600s : ", video_info['duration'])


         # remove pic.twitter.com links
         m = re.search(r"pic.twitter.com[^ \xa0]*", c)
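The video path in this hunk now probes the link first with yt-dlp in simulate mode (-s) and JSON-dump mode (-j), and only downloads when the reported duration is under 600 seconds. A rough standalone sketch of that gate follows, assuming yt-dlp is installed on PATH; the URL is a placeholder (not taken from the diff), and the download flags are copied from the hunk above.

import json
import subprocess

video = 'https://example.com/some/video'  # placeholder URL, replace with a real one

# Probe only: -s simulates (no download), -j dumps the metadata as JSON on stdout.
probe = subprocess.run(['yt-dlp', '-s', '-j', video], capture_output=True)
if probe.returncode != 0:
    raise SystemExit('yt-dlp could not read %s' % video)
video_info = json.loads(probe.stdout)

if video_info['duration'] < 600:
    # Short enough: download, recode to out.mp4 and cap the file size at 100M.
    subprocess.run(
        'rm -f out.*; yt-dlp -N 8 -o out.mp4 --recode-video mp4 '
        '--no-playlist --max-filesize 100M %s' % (video,),
        shell=True)
else:
    print("video duration > 600s : ", video_info['duration'])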
@@ -328,16 +346,16 @@ else:
             if len(toot_media)>0:
                 time.sleep(5)
             toot = mastodon_api.status_post(c,
-                                            in_reply_to_id=None,
+                                            in_reply_to_id=in_reply_to,
                                             media_ids=toot_media,
                                             sensitive=False,
                                             visibility='unlisted',
                                             spoiler_text=None)
         except:
-            print("10s delay")
-            time.sleep(10)
+            print("delay")
+            time.sleep(30)
             toot = mastodon_api.status_post(c,
-                                            in_reply_to_id=None,
+                                            in_reply_to_id=in_reply_to,
                                             media_ids=toot_media,
                                             sensitive=False,
                                             visibility='unlisted',
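With the thread lookup in place, both status_post calls now pass in_reply_to instead of a hard-coded None, so follow-up tweets of a thread become replies on the Mastodon side, and the fallback path waits 30 s instead of 10 s. A condensed sketch of that retry pattern with Mastodon.py is shown below; the instance URL, the access token and the helper name post_with_retry are placeholders, not values from the repository.

import time
from mastodon import Mastodon

mastodon_api = Mastodon(access_token='TOKEN',               # placeholder credentials
                        api_base_url='https://mastodon.example')

def post_with_retry(c, toot_media, in_reply_to):
    """Post once; on any failure, wait 30 s and try a second time (as in the diff)."""
    try:
        if len(toot_media) > 0:
            time.sleep(5)  # give the instance a moment to process the media uploads
        return mastodon_api.status_post(c,
                                        in_reply_to_id=in_reply_to,
                                        media_ids=toot_media,
                                        sensitive=False,
                                        visibility='unlisted',
                                        spoiler_text=None)
    except Exception:
        print("delay")
        time.sleep(30)
        return mastodon_api.status_post(c,
                                        in_reply_to_id=in_reply_to,
                                        media_ids=toot_media,
                                        sensitive=False,
                                        visibility='unlisted',
                                        spoiler_text=None)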