fix small edge cases
This commit is contained in:
parent d0b976f784
commit 9f2ab61ee7
1 changed file with 20 additions and 8 deletions
newbot.py
@@ -51,11 +51,15 @@ def sqllen():
 def propagate_queue(times):
     res = cur.execute(f"SELECT ROWID,link FROM queue WHERE hasplayed = false " + ("ORDER BY RANDOM() " if shuffle else "") + f"LIMIT {times}")
     for rowid,link in res.fetchall():
-        if len(queue) > 2:
+        if len(queue) == 2:
+            print("the queue is already propagated refusing to evaluate!")
+            return
+        elif len(queue) > 2:
             print(f"The queue is larger than two videos this WILL cause issues: {queue}")
+            return
         queue.append(link)
         cur.execute(f"UPDATE queue SET hasplayed = true WHERE ROWID='{rowid}'")
-        print("Updated a line!!")
+        print(f"added {link} to queue")
         con.commit()
 
 def video_check(info, *, incomplete):
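The new guard separates a full queue (exactly two pending videos, the expected steady state, which now returns before touching more rows) from an oversized one (more than two, which indicates a bookkeeping bug elsewhere). Both the SELECT and the UPDATE still build SQL with f-strings; a minimal sketch of the same loop using sqlite3 placeholders instead, assuming cur, con, queue, and shuffle are the module-level names the surrounding code implies:

def propagate_queue(times):
    order = "ORDER BY RANDOM() " if shuffle else ""
    # SQLite accepts a bound parameter for LIMIT, so `times` need not be interpolated.
    res = cur.execute(f"SELECT ROWID,link FROM queue WHERE hasplayed = false {order}LIMIT ?", (times,))
    for rowid, link in res.fetchall():
        if len(queue) == 2:
            print("the queue is already propagated refusing to evaluate!")
            return
        elif len(queue) > 2:
            print(f"The queue is larger than two videos this WILL cause issues: {queue}")
            return
        queue.append(link)
        # Placeholder instead of interpolating rowid into the statement.
        cur.execute("UPDATE queue SET hasplayed = true WHERE ROWID=?", (rowid,))
        print(f"added {link} to queue")
        con.commit()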
@@ -202,6 +206,11 @@ def on_media_input_playback_ended(data):
     obs.set_current_program_scene("waiting")
     skip_list.clear()
     if not queue:
-        obs.set_current_program_scene("waiting")
+        if sqllen() >= 1:
+            propagate_queue(2)
+            download_video(0) #will be noticeably slow but this should not happen
+            os.rename(f"{vid_dir}/999zznext.mp4", f"{vid_dir}/{vidcounter+1}.mp4")
+        else:
+            obs.set_current_program_scene("waiting")
     os.remove(f"{vid_dir}/{vidcounter}.mp4")
     vidcounter = vidcounter + 1
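This branch covers the queue going empty while unplayed rows remain in the database: refill two entries, synchronously download the first (hence the comment about slowness), and move the staged file into the next numbered slot. A hypothetical helper, not in the commit, for the staging pattern the renames repeat, assuming download_video writes its output to 999zznext.mp4 as those renames suggest:

import os

# Hypothetical helper: promote the fixed staging file written by the
# downloader into the numbered slot the player reads next.
def promote_staged_video(vid_dir, vidcounter):
    os.rename(f"{vid_dir}/999zznext.mp4", f"{vid_dir}/{vidcounter + 1}.mp4")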
@@ -227,7 +236,10 @@ def on_media_input_playback_ended(data):
     if len(queue) > 1:
         download_video(1)
         os.rename(f"{vid_dir}/999zznext.mp4", f"{vid_dir}/{vidcounter+1}.mp4")
+    elif sqllen() >= 1 and len(queue) == 1:
+        propagate_queue(1)
+        download_video(1)
+        os.rename(f"{vid_dir}/999zznext.mp4", f"{vid_dir}/{vidcounter+1}.mp4")
 
 @bot.slash_command(
     name="stats",
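Together with the previous hunk, this gives the playback-ended handler one refill policy. A condensed sketch, not the committed function, using the names from the diff and omitting the OBS scene changes and file-counter bookkeeping:

# Names (queue, sqllen, propagate_queue, download_video) are taken from the diff.
def refill_after_playback():
    if not queue:
        if sqllen() >= 1:
            propagate_queue(2)  # queue ran dry but the database still has entries
            download_video(0)   # blocking download of the video about to play
        # otherwise nothing is queued anywhere and the "waiting" scene stays up

    # ...the handler then removes the finished file and advances vidcounter...

    if len(queue) > 1:
        download_video(1)       # prefetch the next queued video
    elif sqllen() >= 1 and len(queue) == 1:
        propagate_queue(1)      # top the in-memory queue back up to two
        download_video(1)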
@@ -265,7 +277,7 @@ async def play(inter: disnake.AppCmdInter, link: str):
     if countuser(inter.user.id) >= (int(os.getenv("MAX_QUEUE"))):
         await inter.edit_original_response(f"You have reached the queue limit of {os.getenv('MAX_QUEUE')}, " + ("try again after one of your videos has played." if not (os.getenv("PERMANENT_MAX_QUEUE","FALSE") == "TRUE") else "you may not queue videos for the rest of the session."))
         return
-    if urlparse(link).netloc == 'youtube.com' or urlparse(link).netloc == 'www.youtube.com' or urlparse(link).netloc == 'youtu.be':
+    if (urlparse(link).netloc == 'youtube.com' or urlparse(link).netloc == 'www.youtube.com' or urlparse(link).netloc == 'youtu.be') and urlparse(link).scheme == 'https':
         cur.execute(f"""INSERT INTO queue VALUES
         ('{link}',{inter.user.id},false)
         """)
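The tightened condition also requires an https scheme, so http:// and schemeless submissions are now rejected along with non-YouTube hosts. A hypothetical standalone equivalent of the new check:

from urllib.parse import urlparse

# Hypothetical helper, equivalent to the new condition: accept only
# https links whose host is a known YouTube domain.
def is_youtube_https(link: str) -> bool:
    parts = urlparse(link)
    return parts.scheme == 'https' and parts.netloc in ('youtube.com', 'www.youtube.com', 'youtu.be')

As with the UPDATE above, the INSERT still interpolates link and inter.user.id into the SQL string; with sqlite3 placeholders the same statement could be written as cur.execute("INSERT INTO queue VALUES (?,?,false)", (link, inter.user.id)), assuming the column order implied by the literal values.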