2014-09-24 14:16:56 +02:00
|
|
|
from __future__ import unicode_literals
|
|
|
|
|
2013-09-23 17:59:27 +02:00
|
|
|
import os
|
2014-09-24 14:16:56 +02:00
|
|
|
import re
|
2013-09-23 17:59:27 +02:00
|
|
|
import subprocess
|
|
|
|
|
|
|
|
from .common import FileDownloader
|
2015-07-28 22:28:30 +02:00
|
|
|
from .fragment import FragmentFD
|
|
|
|
|
|
|
|
from ..compat import compat_urlparse
|
|
|
|
from ..postprocessor.ffmpeg import FFmpegPostProcessor
|
2014-12-13 12:24:42 +01:00
|
|
|
from ..utils import (
|
2015-02-01 18:49:23 +01:00
|
|
|
encodeArgument,
|
2013-09-23 17:59:27 +02:00
|
|
|
encodeFilename,
|
2015-08-30 22:28:36 +02:00
|
|
|
sanitize_open,
|
2015-11-29 05:43:59 +01:00
|
|
|
handle_youtubedl_headers,
|
2013-09-23 17:59:27 +02:00
|
|
|
)
|
|
|
|
|
|
|
|
|
|
|
|
class HlsFD(FileDownloader):
    """Download HLS (m3u8) streams by delegating the work to ffmpeg/avconv.

    The external tool handles manifest parsing, segment fetching and
    remuxing; this class only builds the command line, runs it and
    reports progress/result back through the FileDownloader hooks.
    """

    def real_download(self, filename, info_dict):
        url = info_dict['url']
        self.report_destination(filename)
        tmp_name = self.temp_name(filename)

        # Locate a usable ffmpeg/avconv binary; bail out early if none exists.
        postprocessor = FFmpegPostProcessor(downloader=self)
        if not postprocessor.available:
            self.report_error('m3u8 download detected but ffmpeg or avconv could not be found. Please install one.')
            return False
        postprocessor.check_version()

        cmd = [postprocessor.executable, '-y']

        if info_dict['http_headers'] and re.match(r'^https?://', url):
            # Trailing \r\n after each HTTP header is important to prevent warning from ffmpeg/avconv:
            # [http @ 00000000003d2fa0] No trailing CRLF found in HTTP header.
            header_map = handle_youtubedl_headers(info_dict['http_headers'])
            header_blob = ''.join('%s: %s\r\n' % (name, value) for name, value in header_map.items())
            cmd += ['-headers', header_blob]

        cmd += ['-i', url, '-c', 'copy']
        if self.params.get('hls_use_mpegts', False):
            cmd += ['-f', 'mpegts']
        else:
            cmd += ['-f', 'mp4', '-bsf:a', 'aac_adtstoasc']

        cmd = [encodeArgument(part) for part in cmd]
        cmd.append(encodeFilename(postprocessor._ffmpeg_filename_argument(tmp_name), True))
        self._debug_cmd(cmd)

        process = subprocess.Popen(cmd, stdin=subprocess.PIPE)
        try:
            exit_code = process.wait()
        except KeyboardInterrupt:
            # Killing ffmpeg outright would leave an unplayable mp4; sending
            # 'q' on stdin asks it to quit gracefully so it can finalize the
            # output file (mostly useful for live streams).
            process.communicate(b'q')
            raise

        if exit_code != 0:
            self.to_stderr('\n')
            self.report_error('%s exited with code %d' % (postprocessor.basename, exit_code))
            return False

        file_size = os.path.getsize(encodeFilename(tmp_name))
        self.to_screen('\r[%s] %s bytes' % (cmd[0], file_size))
        self.try_rename(tmp_name, filename)
        self._hook_progress({
            'downloaded_bytes': file_size,
            'total_bytes': file_size,
            'filename': filename,
            'status': 'finished',
        })
        return True
|
2014-09-24 14:16:56 +02:00
|
|
|
|
|
|
|
|
2015-07-28 22:28:30 +02:00
|
|
|
class NativeHlsFD(FragmentFD):
    """Pure-Python HLS downloader that works without ffmpeg.

    Fetches the m3u8 manifest, downloads every media segment in order,
    concatenates them into the destination stream, then removes the
    per-fragment temporary files.
    """

    FD_NAME = 'hlsnative'

    def real_download(self, filename, info_dict):
        manifest_url = info_dict['url']
        self.to_screen('[%s] Downloading m3u8 manifest' % self.FD_NAME)
        raw_manifest = self.ydl.urlopen(manifest_url).read()

        playlist = raw_manifest.decode('utf-8', 'ignore')
        segment_urls = []
        for raw_line in playlist.splitlines():
            entry = raw_line.strip()
            # Skip blank lines and m3u8 tags/comments (lines starting with '#').
            if not entry or entry.startswith('#'):
                continue
            if re.match(r'^https?://', entry):
                segment_urls.append(entry)
            else:
                # Relative segment URI: resolve against the manifest URL.
                segment_urls.append(compat_urlparse.urljoin(manifest_url, entry))
            # We only download the first fragment during the test
            if self.params.get('test', False):
                break

        ctx = {
            'filename': filename,
            'total_frags': len(segment_urls),
        }
        self._prepare_and_start_frag_download(ctx)

        downloaded_fragments = []
        for index, segment_url in enumerate(segment_urls):
            part_name = '%s-Frag%d' % (ctx['tmpfilename'], index)
            if not ctx['dl'].download(part_name, {'url': segment_url}):
                return False
            part_stream, sanitized_name = sanitize_open(part_name, 'rb')
            ctx['dest_stream'].write(part_stream.read())
            part_stream.close()
            downloaded_fragments.append(sanitized_name)

        self._finish_frag_download(ctx)

        # Downloaded data now lives in the destination file; drop the parts.
        for fragment_name in downloaded_fragments:
            os.remove(encodeFilename(fragment_name))

        return True
|