From 1dfd002e53775cfcfef7bf2cdffdfb9087516244 Mon Sep 17 00:00:00 2001
From: Claude Heiland-Allen
Date: Wed, 14 Jun 2023 23:53:17 +0100
Subject: [PATCH] transcode to opus

Signed-off-by: ugrnm
---
 README                |  1 +
 TODO                  |  2 --
 download_loooooops.py | 33 ++++++++++++++++++++++++++++++---
 3 files changed, 31 insertions(+), 5 deletions(-)

diff --git a/README b/README
index 9f698c5..93753ad 100644
--- a/README
+++ b/README
@@ -15,6 +15,7 @@ There are two scripts:
 * download_loooooops.py
   * gets latest toots tagged with #looptoper
   * download all the attachment if new
+  * transcodes to opus
   * generates new playlist and tells ezstream to reload it
diff --git a/TODO b/TODO
index 80352db..a142ec9 100644
--- a/TODO
+++ b/TODO
@@ -3,8 +3,6 @@ TODO
 
 * cache the json parsing/process locally for faster restart tests
 * only download MP3 files
-* extract relevant json stuff to fill in or update ID3 tags
 * 80c :)
-* convert the downloaded mp3 files to opus
 * get rid of request, just use urlib or something
 * build the loop inside the download script
diff --git a/download_loooooops.py b/download_loooooops.py
index b68b84e..6a6d79d 100755
--- a/download_loooooops.py
+++ b/download_loooooops.py
@@ -11,8 +11,27 @@ from urllib.parse import urlparse
 
 output_dir = "loooooops"
 
+bitrate = "128k"
+
+def transcode_media(path, media_item, metadata):
+    infile = os.path.join(path, media_item)
+    outfile = os.path.join(path, media_item + ".opus")
+    if not os.path.exists(outfile):
+        print("transcodeing to {}".format(outfile))
+        pid = os.fork()
+        if pid == 0:
+            artist = metadata["creator"]
+            title = metadata["url"]
+            comment = metadata["description"]
+            date = metadata["date"]
+            os.execlp("ffmpeg", "ffmpeg", "-hide_banner", "-loglevel", "error", "-i", infile, "-map_metadata", "-1", "-metadata", "artist={}".format(artist), "-metadata", "title={}".format(title), "-metadata", "creation_time={}".format(date), "-map_chapters", "-1", "-ac", "2", "-af", "loudnorm=dual_mono=true", "-b:a", bitrate, "-y", outfile)
+            # never reached
+        else:
+            os.wait()
+
 
 def grab_media(path, url):
+  try:
     media_item = urlparse(url).path.split('/')[-1]
 
     headers = {
@@ -20,13 +39,19 @@
         'From': 'post.lurk.org/@lurk' # This is another valid field
     }
 
-    if not os.path.exists(os.path.join(path, media_item)):
+    if os.path.exists(os.path.join(path, media_item)):
+        return media_item
+    else:
         response = requests.get(url, headers=headers, stream=True)
         if response.ok:
             with open(os.path.join(path, media_item), 'wb') as media_file:
                 shutil.copyfileobj(response.raw, media_file)
                 print('Downloaded media {} from {}'.format(media_item, urlparse(url).netloc))
             return media_item
+  except requests.exceptions.ConnectionError as e:
+    # maybe transient network issues
+    print(e)
+    sleep(60)
 
 #This pages through all the looptober tag and collects the json in 'data'
 there_is_more = True
@@ -92,10 +117,12 @@ for l in looooops:
         else:
             url = a['url']
 
-        grab_media(path, url)
+        media_item = grab_media(path, url)
+        if media_item:
+            transcode_media(path, media_item, l)
 
 
 # Once we've done everythin we generate a playlist and ask ezstream
 # to reload it
-os.system('find . -iname "*mp3" > playlist_loooooops.m3u'\
+os.system('find . -iname "*opus" > playlist_loooooops.m3u'\
 '&& kill -s HUP `pidof ezstream`')
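
For reference, the argument list passed to os.execlp() in transcode_media() amounts to an
ffmpeg command line of roughly the following shape; "loop.mp3" stands in for a downloaded
attachment, and the artist/title/creation_time values are filled from the loop's
"creator", "url" and "date" metadata:

    ffmpeg -hide_banner -loglevel error -i loop.mp3 \
        -map_metadata -1 \
        -metadata artist='<creator>' \
        -metadata title='<url>' \
        -metadata creation_time='<date>' \
        -map_chapters -1 -ac 2 \
        -af loudnorm=dual_mono=true \
        -b:a 128k -y loop.mp3.opus

-map_metadata -1 and -map_chapters -1 drop the source file's own tags and chapters before
the new metadata is written, -ac 2 with -af loudnorm=dual_mono=true forces stereo output
and applies EBU R128 loudness normalisation, and the .opus output extension makes ffmpeg
pick the Opus encoder at the 128k bitrate set at the top of the script.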