Mirror of https://codeberg.org/polarisfm/youtube-dl, synced 2024-11-22 16:44:32 +01:00
Py2/3 parse_qs compatibility
This commit is contained in:
parent a130bc6d02
commit 9f37a95941
@@ -11,7 +11,6 @@ import email.utils
 import xml.etree.ElementTree
 import random
 import math
-from urlparse import parse_qs

 from utils import *

@@ -329,7 +328,7 @@ class YoutubeIE(InfoExtractor):
             request = compat_urllib_request.Request(video_info_url)
             try:
                 video_info_webpage = compat_urllib_request.urlopen(request).read()
-                video_info = parse_qs(video_info_webpage)
+                video_info = compat_parse_qs(video_info_webpage)
                 if 'token' in video_info:
                     break
             except (compat_urllib_error.URLError, compat_http_client.HTTPException, socket.error) as err:
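For orientation, parse_qs (and the compat_parse_qs wrapper this commit introduces) turns a URL-encoded string into a dict that maps each key to a list of values, which is why the extractor checks membership with 'token' in video_info and indexes results with [0] further down. A minimal, self-contained sketch with invented values; the key names mirror the diff, and the shim spelling here simply assumes parse_qs ends up under the compat_parse_qs name:

# Illustrative sketch only; the values are made up.
try:
    from urllib.parse import parse_qs as compat_parse_qs  # Python 3
except ImportError:  # Python 2
    from urlparse import parse_qs as compat_parse_qs

video_info_webpage = 'token=abc123&conn=rtmp%3A%2F%2Fexample.invalid%2Fstream'
video_info = compat_parse_qs(video_info_webpage)
# Every value comes back as a list:
# {'token': ['abc123'], 'conn': ['rtmp://example.invalid/stream']}
print('token' in video_info)   # True
print(video_info['conn'][0])   # rtmp://example.invalid/stream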
@@ -437,7 +436,7 @@ class YoutubeIE(InfoExtractor):
             video_url_list = [(None, video_info['conn'][0])]
         elif 'url_encoded_fmt_stream_map' in video_info and len(video_info['url_encoded_fmt_stream_map']) >= 1:
             url_data_strs = video_info['url_encoded_fmt_stream_map'][0].split(',')
-            url_data = [parse_qs(uds) for uds in url_data_strs]
+            url_data = [compat_parse_qs(uds) for uds in url_data_strs]
             url_data = filter(lambda ud: 'itag' in ud and 'url' in ud, url_data)
             url_map = dict((ud['itag'][0], ud['url'][0] + '&signature=' + ud['sig'][0]) for ud in url_data)

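The url_encoded_fmt_stream_map value is itself a comma-separated list of URL-encoded records, one per format. A rough sketch of what this hunk does to it, with invented data (on Python 2 the import would come from urlparse instead):

# Invented two-format stream map; real entries carry many more fields.
from urllib.parse import parse_qs as compat_parse_qs  # Python 3 spelling of the shim

stream_map = 'itag=22&url=http%3A%2F%2Fv.example%2Fhd&sig=AAA,itag=18&url=http%3A%2F%2Fv.example%2Fsd&sig=BBB'
url_data_strs = stream_map.split(',')
url_data = [compat_parse_qs(uds) for uds in url_data_strs]
url_data = filter(lambda ud: 'itag' in ud and 'url' in ud, url_data)
url_map = dict((ud['itag'][0], ud['url'][0] + '&signature=' + ud['sig'][0]) for ud in url_data)
print(url_map)
# {'22': 'http://v.example/hd&signature=AAA', '18': 'http://v.example/sd&signature=BBB'}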
@@ -594,7 +593,7 @@ class MetacafeIE(InfoExtractor):
         if mobj is None:
             self._downloader.trouble(u'ERROR: unable to extract media URL')
             return
-        vardict = parse_qs(mobj.group(1))
+        vardict = compat_parse_qs(mobj.group(1))
         if 'mediaData' not in vardict:
             self._downloader.trouble(u'ERROR: unable to extract media URL')
             return
@@ -33,19 +33,23 @@ except ImportError: # Python 2

 try:
     import html.entities as compat_html_entities
-except NameError: # Python 2
+except ImportError: # Python 2
     import htmlentitydefs as compat_html_entities

 try:
     import html.parser as compat_html_parser
-except NameError: # Python 2
+except ImportError: # Python 2
     import HTMLParser as compat_html_parser

 try:
     import http.client as compat_html_client
-except NameError: # Python 2
+except ImportError: # Python 2
     import httplib as compat_html_client

+try:
+    from urllib.parse.parse_qs import parse_qs as compat_parse_qs
+except ImportError: # Python 2
+    from urlparse import parse_qs as compat_parse_qs

 try:
     compat_str = unicode # Python 2
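The final hunk, in the shared utils module, changes the fallback clauses from except NameError to except ImportError (a missing module raises ImportError, not NameError) and adds a compat_parse_qs alias in the same style. For reference, Python 3 exposes parse_qs as a function on the urllib.parse module, so a minimal standalone version of this shim can be sketched as:

# Minimal sketch of the Py2/3 import-shim pattern used throughout the utils module.
try:
    from urllib.parse import parse_qs as compat_parse_qs  # Python 3: urllib.parse.parse_qs
except ImportError:  # Python 2
    from urlparse import parse_qs as compat_parse_qs

# Both implementations return a list-valued dict:
assert compat_parse_qs('a=1&a=2&b=x') == {'a': ['1', '2'], 'b': ['x']}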