Compare commits
2 Commits
c9fdf31745...ad3a2fb64e
Author | SHA1 | Date |
---|---|---|
Wizzard | ad3a2fb64e | |
Wizzard | d0bf08b6de | |
main.py | 30
```diff
@@ -27,36 +27,43 @@ def add_downloaded_video(video_id):
     with open(downloaded_videos_file, 'a') as file:
         file.write(video_id + '\n')

-def get_latest_video_url(channel_id):
+def get_all_videos(channel_id):
     rss_url = f"https://www.youtube.com/feeds/videos.xml?channel_id={channel_id}"
     feed = feedparser.parse(rss_url)
-    latest_entry = feed.entries[0] if feed.entries else None
-    return latest_entry.link, latest_entry.yt_videoid if latest_entry else (None, None)
+    return [
+        (entry.link, entry.yt_videoid, entry.published_parsed)
+        for entry in feed.entries
+    ]

 @tasks.loop(minutes=5)
 async def check_new_videos():
     downloaded_videos = get_downloaded_videos()
+    max_videos_to_download = 9

     for channel_id in YOUTUBE_CHANNEL_IDS:
         print(f"Checking new videos for channel: {channel_id}")
         try:
-            video_url, video_id = get_latest_video_url(channel_id)
-            if video_url and video_id not in downloaded_videos:
+            videos = get_all_videos(channel_id)
+            videos.sort(key=lambda x: x[2])
+            undownloaded_videos = [(video_url, video_id) for video_url, video_id, _ in videos if video_id not in downloaded_videos]
+            videos_to_download = undownloaded_videos[-max_videos_to_download:]
+            for video_url, video_id in videos_to_download:
+                if video_url and video_id:
                     channel_download_dir = os.path.join(download_dir, channel_id)
                     os.makedirs(channel_download_dir, exist_ok=True)

                     ydl_opts = {
-                        'format': 'best',
+                        'format': 'bestvideo+bestaudio/best',
                         'outtmpl': f'{channel_download_dir}/%(title)s [%(id)s].%(ext)s',
+                        'cookiefile': 'cookies.txt',
                     }

                     with youtube_dl.YoutubeDL(ydl_opts) as ydl:
                         ydl.download([video_url])
                     add_downloaded_video(video_id)
                     channel = bot.get_channel(int(DISCORD_CHANNEL_ID))
-                    await channel.send(f"@everyone New tard video dropped and has been archived: {video_url}")
-                    print(f"Downloaded and notified for channel: {channel_id}")
+                    await channel.send(f"@everyone New video dropped and has been archived: {video_url}")
+                    print(f"Downloaded and notified for video {video_id} from channel: {channel_id}")
                 else:
-                    print(f"No new videos or already downloaded for channel: {channel_id}")
+                    print(f"Video URL or ID missing for video ID {video_id} from channel: {channel_id}")
         except Exception as e:
             print(f"Error processing channel {channel_id}: {e}")
         await asyncio.sleep(5)
@@ -67,4 +74,3 @@ async def on_ready():
     check_new_videos.start()

 bot.run(DISCORD_BOT_TOKEN)
-
```
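For readers skimming the diff: the bot previously archived only the single newest RSS entry per channel; it now gathers every entry from the channel feed, sorts by publish time, and downloads up to `max_videos_to_download` of the newest entries that are not yet archived. Below is a minimal standalone sketch of that selection logic; the `pick_videos_to_download` helper and the placeholder channel ID are illustrative only and do not appear in main.py.

```python
import feedparser  # same RSS parser main.py relies on


def get_all_videos(channel_id):
    """Return (link, video_id, published_parsed) for every entry in the channel feed."""
    rss_url = f"https://www.youtube.com/feeds/videos.xml?channel_id={channel_id}"
    feed = feedparser.parse(rss_url)
    # feedparser exposes yt:videoId as entry.yt_videoid; published_parsed is a time.struct_time
    return [(entry.link, entry.yt_videoid, entry.published_parsed) for entry in feed.entries]


def pick_videos_to_download(videos, downloaded_ids, limit=9):
    """Illustrative helper: newest `limit` videos that have not been archived yet."""
    videos.sort(key=lambda v: v[2])  # struct_time sorts chronologically, oldest first
    undownloaded = [(url, vid) for url, vid, _ in videos if vid not in downloaded_ids]
    return undownloaded[-limit:]     # keep only the most recent entries


if __name__ == "__main__":
    videos = get_all_videos("UC_x5XG1OV2P6uZZ5FSM9Ttw")  # placeholder channel ID
    for url, vid in pick_videos_to_download(videos, downloaded_ids=set()):
        print(vid, url)
```

A YouTube channel feed only exposes the most recent uploads (roughly 15), so the cap of 9 mainly limits the burst of downloads on a first run against an empty downloaded_videos file.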
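On the downloader options touched by this commit: 'bestvideo+bestaudio/best' requests the best separate video and audio streams and merges them (falling back to the best single file), which requires ffmpeg to be available, and 'cookiefile' points the downloader at a Netscape-format cookies.txt so age-restricted or otherwise gated uploads can still be fetched. A minimal sketch, assuming the `youtube_dl` name in main.py is youtube-dl or yt-dlp (both accept these options); the output path and URL below are placeholders:

```python
import youtube_dl  # assumption: yt-dlp imported under this alias would accept the same options

ydl_opts = {
    'format': 'bestvideo+bestaudio/best',  # merge best streams; ffmpeg must be installed
    'outtmpl': 'downloads/%(title)s [%(id)s].%(ext)s',  # placeholder path; main.py uses a per-channel dir
    'cookiefile': 'cookies.txt',  # Netscape-format cookie export, only needed for restricted videos
}

with youtube_dl.YoutubeDL(ydl_opts) as ydl:
    ydl.download(['https://www.youtube.com/watch?v=dQw4w9WgXcQ'])  # placeholder URL
```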