Merge branch 'opener-to-ydl'

Philipp Hagemeister 2013-11-25 03:30:37 +01:00
commit 79d09f47c2
4 changed files with 97 additions and 97 deletions

youtube_dl/YoutubeDL.py

@@ -7,8 +7,10 @@ import errno
 import io
 import json
 import os
+import platform
 import re
 import shutil
+import subprocess
 import socket
 import sys
 import time
@@ -18,6 +20,7 @@ if os.name == 'nt':
     import ctypes
 
 from .utils import (
+    compat_cookiejar,
     compat_http_client,
     compat_print,
     compat_str,
@@ -32,8 +35,10 @@ from .utils import (
     ExtractorError,
     format_bytes,
     locked_file,
+    make_HTTPS_handler,
     MaxDownloadsReached,
     PostProcessingError,
+    platform_name,
     preferredencoding,
     SameFileError,
     sanitize_filename,
@@ -42,9 +47,11 @@ from .utils import (
     UnavailableVideoError,
     write_json_file,
     write_string,
+    YoutubeDLHandler,
 )
 from .extractor import get_info_extractor, gen_extractors
 from .FileDownloader import FileDownloader
+from .version import __version__
 
 
 class YoutubeDL(object):
@@ -122,6 +129,9 @@ class YoutubeDL(object):
     downloadarchive:   File name of a file where all downloads are recorded.
                        Videos already present in the file are not downloaded
                        again.
+    cookiefile:        File name where cookies should be read from and dumped to.
+    nocheckcertificate:Do not verify SSL certificates
+    proxy:             URL of the proxy server to use
 
     The following parameters are not used by YoutubeDL itself, they are used by
     the FileDownloader:
@@ -162,6 +172,8 @@
         if '%(stitle)s' in self.params['outtmpl']:
             self.report_warning(u'%(stitle)s is deprecated. Use the %(title)s and the --restrict-filenames flag(which also secures %(uploader)s et al) instead.')
 
+        self._setup_opener()
+
     def add_info_extractor(self, ie):
         """Add an InfoExtractor object to the end of the list."""
         self._ies.append(ie)
@@ -243,6 +255,9 @@
     def __exit__(self, *args):
         self.restore_console_title()
+
+        if self.params.get('cookiefile') is not None:
+            self.cookiejar.save()
 
     def fixed_template(self):
         """Checks if the output template is fixed."""
         return (re.search(u'(?u)%\\(.+?\\)s', self.params['outtmpl']) is None)
@@ -789,7 +804,7 @@
         for url in url_list:
             try:
                 #It also downloads the videos
-                videos = self.extract_info(url)
+                self.extract_info(url)
             except UnavailableVideoError:
                 self.report_error(u'unable to download video')
             except MaxDownloadsReached:
@@ -914,3 +929,73 @@
                             '_resolution': u'resolution', 'format_note': u'note'}, idlen=idlen)
         self.to_screen(u'[info] Available formats for %s:\n%s\n%s' %
                        (info_dict['id'], header_line, u"\n".join(formats_s)))
+
+    def urlopen(self, req):
+        """ Start an HTTP download """
+        return self._opener.open(req)
+
+    def print_debug_header(self):
+        if not self.params.get('verbose'):
+            return
+        write_string(u'[debug] youtube-dl version ' + __version__ + u'\n')
+        try:
+            sp = subprocess.Popen(
+                ['git', 'rev-parse', '--short', 'HEAD'],
+                stdout=subprocess.PIPE, stderr=subprocess.PIPE,
+                cwd=os.path.dirname(os.path.abspath(__file__)))
+            out, err = sp.communicate()
+            out = out.decode().strip()
+            if re.match('[0-9a-f]+', out):
+                write_string(u'[debug] Git HEAD: ' + out + u'\n')
+        except:
+            try:
+                sys.exc_clear()
+            except:
+                pass
+        write_string(u'[debug] Python version %s - %s' %
+                     (platform.python_version(), platform_name()) + u'\n')
+
+        proxy_map = {}
+        for handler in self._opener.handlers:
+            if hasattr(handler, 'proxies'):
+                proxy_map.update(handler.proxies)
+        write_string(u'[debug] Proxy map: ' + compat_str(proxy_map) + u'\n')
+
+    def _setup_opener(self, timeout=300):
+        opts_cookiefile = self.params.get('cookiefile')
+        opts_proxy = self.params.get('proxy')
+
+        if opts_cookiefile is None:
+            self.cookiejar = compat_cookiejar.CookieJar()
+        else:
+            self.cookiejar = compat_cookiejar.MozillaCookieJar(
+                opts_cookiefile)
+            if os.access(opts_cookiefile, os.R_OK):
+                self.cookiejar.load()
+        cookie_processor = compat_urllib_request.HTTPCookieProcessor(
+            self.cookiejar)
+        if opts_proxy is not None:
+            if opts_proxy == '':
+                proxies = {}
+            else:
+                proxies = {'http': opts_proxy, 'https': opts_proxy}
+        else:
+            proxies = compat_urllib_request.getproxies()
+            # Set HTTPS proxy to HTTP one if given (https://github.com/rg3/youtube-dl/issues/805)
+            if 'http' in proxies and 'https' not in proxies:
+                proxies['https'] = proxies['http']
+        proxy_handler = compat_urllib_request.ProxyHandler(proxies)
+        https_handler = make_HTTPS_handler(
+            self.params.get('nocheckcertificate', False))
+        opener = compat_urllib_request.build_opener(
+            https_handler, proxy_handler, cookie_processor, YoutubeDLHandler())
+        # Delete the default user-agent header, which would otherwise apply in
+        # cases where our custom HTTP handler doesn't come into play
+        # (See https://github.com/rg3/youtube-dl/issues/1309 for details)
+        opener.addheaders = []
+        self._opener = opener
+
+        # TODO remove this global modification
+        compat_urllib_request.install_opener(opener)
+        socket.setdefaulttimeout(timeout)
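
The new urlopen(), print_debug_header() and _setup_opener() methods move all of the network plumbing onto the YoutubeDL instance, driven purely by its parameters. A minimal usage sketch, assuming the post-merge API shown above (the option values and the example URL are illustrative, not taken from this commit):

    # Sketch only: exercising the per-instance opener built by _setup_opener().
    # 'cookiefile', 'nocheckcertificate' and 'proxy' are the parameters added to
    # the YoutubeDL docstring in this commit; the values here are made up.
    from youtube_dl import YoutubeDL

    ydl_opts = {
        'outtmpl': u'%(title)s.%(ext)s',
        'cookiefile': 'cookies.txt',    # loaded in _setup_opener(), saved in __exit__()
        'nocheckcertificate': False,    # forwarded to make_HTTPS_handler()
        'proxy': '',                    # '' disables proxies; None falls back to getproxies()
        'verbose': True,
    }

    with YoutubeDL(ydl_opts) as ydl:
        ydl.print_debug_header()        # version, Python/platform, proxy map
        # every request now goes through the instance's opener
        page = ydl.urlopen('http://example.com/').read()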

youtube_dl/__init__.py

@@ -41,45 +41,35 @@ __authors__ = (
 __license__ = 'Public Domain'
 
 import codecs
-import collections
 import getpass
 import optparse
 import os
 import random
 import re
 import shlex
-import socket
 import subprocess
 import sys
-import traceback
-import platform
 
 from .utils import (
-    compat_cookiejar,
     compat_print,
-    compat_str,
-    compat_urllib_request,
     DateRange,
     decodeOption,
     determine_ext,
     DownloadError,
     get_cachedir,
-    make_HTTPS_handler,
     MaxDownloadsReached,
-    platform_name,
     preferredencoding,
     SameFileError,
     std_headers,
     write_string,
-    YoutubeDLHandler,
 )
 from .update import update_self
-from .version import __version__
 from .FileDownloader import (
     FileDownloader,
 )
 from .extractor import gen_extractors
+from .version import __version__
 from .YoutubeDL import YoutubeDL
 from .PostProcessor import (
     FFmpegMetadataPP,
@@ -452,19 +442,6 @@ def _real_main(argv=None):
     parser, opts, args = parseOpts(argv)
 
-    # Open appropriate CookieJar
-    if opts.cookiefile is None:
-        jar = compat_cookiejar.CookieJar()
-    else:
-        try:
-            jar = compat_cookiejar.MozillaCookieJar(opts.cookiefile)
-            if os.access(opts.cookiefile, os.R_OK):
-                jar.load()
-        except (IOError, OSError) as err:
-            if opts.verbose:
-                traceback.print_exc()
-            write_string(u'ERROR: unable to open cookie file\n')
-            sys.exit(101)
-
     # Set user agent
     if opts.user_agent is not None:
         std_headers['User-Agent'] = opts.user_agent
@@ -496,8 +473,6 @@
     all_urls = batchurls + args
     all_urls = [url.strip() for url in all_urls]
 
-    opener = _setup_opener(jar=jar, opts=opts)
-
     extractors = gen_extractors()
 
     if opts.list_extractors:
@@ -552,7 +527,7 @@
     if opts.retries is not None:
         try:
             opts.retries = int(opts.retries)
-        except (TypeError, ValueError) as err:
+        except (TypeError, ValueError):
             parser.error(u'invalid retry count specified')
     if opts.buffersize is not None:
         numeric_buffersize = FileDownloader.parse_bytes(opts.buffersize)
@@ -563,13 +538,13 @@
         opts.playliststart = int(opts.playliststart)
         if opts.playliststart <= 0:
             raise ValueError(u'Playlist start must be positive')
-    except (TypeError, ValueError) as err:
+    except (TypeError, ValueError):
         parser.error(u'invalid playlist start number specified')
     try:
         opts.playlistend = int(opts.playlistend)
         if opts.playlistend != -1 and (opts.playlistend <= 0 or opts.playlistend < opts.playliststart):
             raise ValueError(u'Playlist end must be greater than playlist start')
-    except (TypeError, ValueError) as err:
+    except (TypeError, ValueError):
         parser.error(u'invalid playlist end number specified')
     if opts.extractaudio:
         if opts.audioformat not in ['best', 'aac', 'mp3', 'm4a', 'opus', 'vorbis', 'wav']:
@@ -672,34 +647,12 @@
         'youtube_print_sig_code': opts.youtube_print_sig_code,
         'age_limit': opts.age_limit,
         'download_archive': opts.download_archive,
+        'cookiefile': opts.cookiefile,
+        'nocheckcertificate': opts.no_check_certificate,
         }
 
     with YoutubeDL(ydl_opts) as ydl:
-        if opts.verbose:
-            write_string(u'[debug] youtube-dl version ' + __version__ + u'\n')
-            try:
-                sp = subprocess.Popen(
-                    ['git', 'rev-parse', '--short', 'HEAD'],
-                    stdout=subprocess.PIPE, stderr=subprocess.PIPE,
-                    cwd=os.path.dirname(os.path.abspath(__file__)))
-                out, err = sp.communicate()
-                out = out.decode().strip()
-                if re.match('[0-9a-f]+', out):
-                    write_string(u'[debug] Git HEAD: ' + out + u'\n')
-            except:
-                try:
-                    sys.exc_clear()
-                except:
-                    pass
-            write_string(u'[debug] Python version %s - %s' %
-                         (platform.python_version(), platform_name()) + u'\n')
-
-            proxy_map = {}
-            for handler in opener.handlers:
-                if hasattr(handler, 'proxies'):
-                    proxy_map.update(handler.proxies)
-            write_string(u'[debug] Proxy map: ' + compat_str(proxy_map) + u'\n')
+        ydl.print_debug_header()
         ydl.add_default_info_extractors()
 
         # PostProcessors
@@ -730,46 +683,9 @@
             ydl.to_screen(u'--max-download limit reached, aborting.')
             retcode = 101
 
-    # Dump cookie jar if requested
-    if opts.cookiefile is not None:
-        try:
-            jar.save()
-        except (IOError, OSError):
-            sys.exit(u'ERROR: unable to save cookie jar')
-
     sys.exit(retcode)
 
-
-def _setup_opener(jar=None, opts=None, timeout=300):
-    if opts is None:
-        FakeOptions = collections.namedtuple(
-            'FakeOptions', ['proxy', 'no_check_certificate'])
-        opts = FakeOptions(proxy=None, no_check_certificate=False)
-
-    cookie_processor = compat_urllib_request.HTTPCookieProcessor(jar)
-    if opts.proxy is not None:
-        if opts.proxy == '':
-            proxies = {}
-        else:
-            proxies = {'http': opts.proxy, 'https': opts.proxy}
-    else:
-        proxies = compat_urllib_request.getproxies()
-        # Set HTTPS proxy to HTTP one if given (https://github.com/rg3/youtube-dl/issues/805)
-        if 'http' in proxies and 'https' not in proxies:
-            proxies['https'] = proxies['http']
-    proxy_handler = compat_urllib_request.ProxyHandler(proxies)
-    https_handler = make_HTTPS_handler(opts)
-    opener = compat_urllib_request.build_opener(
-        https_handler, proxy_handler, cookie_processor, YoutubeDLHandler())
-    # Delete the default user-agent header, which would otherwise apply in
-    # cases where our custom HTTP handler doesn't come into play
-    # (See https://github.com/rg3/youtube-dl/issues/1309 for details)
-    opener.addheaders = []
-    compat_urllib_request.install_opener(opener)
-    socket.setdefaulttimeout(timeout)
-    return opener
-
-
 def main(argv=None):
     try:
         _real_main(argv)
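
With the cookie jar, the opener and the verbose debug output all handled inside YoutubeDL, _real_main() is reduced to translating command-line options into parameters. A small sketch of the equivalence (a hedged illustration, not code from this commit; the parameter values stand in for the corresponding opts attributes):

    # Sketch only: what _real_main() used to wire up by hand is now implied by
    # the parameters passed to YoutubeDL.
    from youtube_dl import YoutubeDL

    ydl_opts = {
        'outtmpl': u'%(title)s.%(ext)s',
        'cookiefile': None,             # stands in for opts.cookiefile
        'nocheckcertificate': False,    # stands in for opts.no_check_certificate
    }

    with YoutubeDL(ydl_opts) as ydl:    # __exit__ replaces the old jar.save() at the end
        ydl.print_debug_header()        # replaces the old inline "if opts.verbose:" block
        ydl.add_default_info_extractors()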

youtube_dl/extractor/common.py

@@ -9,7 +9,6 @@ import xml.etree.ElementTree
 
 from ..utils import (
     compat_http_client,
     compat_urllib_error,
-    compat_urllib_request,
     compat_str,
     clean_html,
@@ -20,6 +19,7 @@ from ..utils import (
     unescapeHTML,
 )
+
 
 class InfoExtractor(object):
     """Information Extractor class.
@@ -158,7 +158,7 @@
         elif note is not False:
             self.to_screen(u'%s: %s' % (video_id, note))
         try:
-            return compat_urllib_request.urlopen(url_or_request)
+            return self._downloader.urlopen(url_or_request)
         except (compat_urllib_error.URLError, compat_http_client.HTTPException, socket.error) as err:
             if errnote is None:
                 errnote = u'Unable to download webpage'
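
Page downloads in the extractors now go through the owning YoutubeDL instance rather than the globally installed opener, so the per-instance cookies, proxy and certificate settings configured by _setup_opener() also apply to extraction requests. A minimal sketch, assuming the 2013-era API visible in this diff (the example URL and file name are illustrative):

    # Sketch only: extractors attached to a YoutubeDL instance fetch pages via
    # self._downloader.urlopen(), i.e. through that instance's opener.
    from youtube_dl import YoutubeDL

    with YoutubeDL({'outtmpl': u'%(title)s.%(ext)s', 'cookiefile': 'cookies.txt'}) as ydl:
        ydl.add_default_info_extractors()
        # _request_webpage()/_download_webpage() inside the extractor end up in
        # YoutubeDL.urlopen() for this request:
        info = ydl.extract_info('http://vimeo.com/56015672', download=False)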

youtube_dl/utils.py

@@ -538,8 +538,7 @@ def formatSeconds(secs):
     else:
         return '%d' % secs
 
 
-
-def make_HTTPS_handler(opts):
+def make_HTTPS_handler(opts_no_check_certificate):
     if sys.version_info < (3, 2):
         import httplib
@@ -566,7 +565,7 @@ def make_HTTPS_handler(opts):
         context.set_default_verify_paths()
         context.verify_mode = (ssl.CERT_NONE
-                               if opts.no_check_certificate
+                               if opts_no_check_certificate
                                else ssl.CERT_REQUIRED)
         return compat_urllib_request.HTTPSHandler(context=context)
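
The signature change decouples the handler from the optparse options object: callers now pass a plain boolean, which is what allows YoutubeDL._setup_opener() to call make_HTTPS_handler(self.params.get('nocheckcertificate', False)). A small standalone sketch of the new call, using only names that this diff imports from youtube_dl.utils:

    # Sketch only: building an opener with the renamed boolean parameter.
    from youtube_dl.utils import (
        compat_urllib_request,
        make_HTTPS_handler,
        YoutubeDLHandler,
    )

    https_handler = make_HTTPS_handler(False)   # False = verify SSL certificates
    opener = compat_urllib_request.build_opener(https_handler, YoutubeDLHandler())
    response = opener.open('https://example.com/')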