Merge branch 'opener-to-ydl'

commit 79d09f47c2

4 changed files with 97 additions and 97 deletions
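
The hunks below move the HTTP opener, cookie-jar, proxy and SSL setup out of the command-line entry point (judging from the _real_main hunks, youtube_dl/__init__.py) and into the YoutubeDL class itself, which now exposes urlopen() and print_debug_header() and writes its cookie jar back on exit. A minimal usage sketch of the reworked interface, assuming only what the new docstring and __init__.py hunks show; the output template, cookie path and URL are placeholders:

from youtube_dl import YoutubeDL

ydl_opts = {
    'outtmpl': u'%(title)s-%(id)s.%(ext)s',
    'cookiefile': 'cookies.txt',       # read at startup, saved again on exit
    'nocheckcertificate': False,       # verify SSL certificates
    'proxy': '',                       # '' disables proxies, None uses the environment
}

# __exit__ saves the cookie jar, so the with-block replaces the old
# "Dump cookie jar if requested" code path in __init__.py.
with YoutubeDL(ydl_opts) as ydl:
    ydl.add_default_info_extractors()
    ydl.download([u'http://example.com/watch?v=placeholder'])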
@@ -7,8 +7,10 @@
import io
import json
import os
import platform
import re
import shutil
import subprocess
import socket
import sys
import time

@@ -18,6 +20,7 @@
    import ctypes

from .utils import (
    compat_cookiejar,
    compat_http_client,
    compat_print,
    compat_str,

@@ -32,8 +35,10 @@
    ExtractorError,
    format_bytes,
    locked_file,
    make_HTTPS_handler,
    MaxDownloadsReached,
    PostProcessingError,
    platform_name,
    preferredencoding,
    SameFileError,
    sanitize_filename,

@@ -42,9 +47,11 @@
    UnavailableVideoError,
    write_json_file,
    write_string,
    YoutubeDLHandler,
)
from .extractor import get_info_extractor, gen_extractors
from .FileDownloader import FileDownloader
from .version import __version__


class YoutubeDL(object):
@@ -122,6 +129,9 @@ class YoutubeDL(object):
    downloadarchive:   File name of a file where all downloads are recorded.
                       Videos already present in the file are not downloaded
                       again.
    cookiefile:        File name where cookies should be read from and dumped to.
    nocheckcertificate:Do not verify SSL certificates
    proxy:             URL of the proxy server to use

    The following parameters are not used by YoutubeDL itself, they are used by
    the FileDownloader:

@@ -162,6 +172,8 @@ def __init__(self, params):
        if '%(stitle)s' in self.params['outtmpl']:
            self.report_warning(u'%(stitle)s is deprecated. Use the %(title)s and the --restrict-filenames flag(which also secures %(uploader)s et al) instead.')

        self._setup_opener()

    def add_info_extractor(self, ie):
        """Add an InfoExtractor object to the end of the list."""
        self._ies.append(ie)

@@ -243,6 +255,9 @@ def __enter__(self):
    def __exit__(self, *args):
        self.restore_console_title()

        if self.params.get('cookiefile') is not None:
            self.cookiejar.save()

    def fixed_template(self):
        """Checks if the output template is fixed."""
        return (re.search(u'(?u)%\\(.+?\\)s', self.params['outtmpl']) is None)
@@ -789,7 +804,7 @@ def download(self, url_list):
        for url in url_list:
            try:
                #It also downloads the videos
-                videos = self.extract_info(url)
+                self.extract_info(url)
            except UnavailableVideoError:
                self.report_error(u'unable to download video')
            except MaxDownloadsReached:
@@ -914,3 +929,73 @@ def line(format, idlen=20):
            '_resolution': u'resolution', 'format_note': u'note'}, idlen=idlen)
        self.to_screen(u'[info] Available formats for %s:\n%s\n%s' %
                       (info_dict['id'], header_line, u"\n".join(formats_s)))

    def urlopen(self, req):
        """ Start an HTTP download """
        return self._opener.open(req)

    def print_debug_header(self):
        if not self.params.get('verbose'):
            return
        write_string(u'[debug] youtube-dl version ' + __version__ + u'\n')
        try:
            sp = subprocess.Popen(
                ['git', 'rev-parse', '--short', 'HEAD'],
                stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                cwd=os.path.dirname(os.path.abspath(__file__)))
            out, err = sp.communicate()
            out = out.decode().strip()
            if re.match('[0-9a-f]+', out):
                write_string(u'[debug] Git HEAD: ' + out + u'\n')
        except:
            try:
                sys.exc_clear()
            except:
                pass
        write_string(u'[debug] Python version %s - %s' %
                     (platform.python_version(), platform_name()) + u'\n')

        proxy_map = {}
        for handler in self._opener.handlers:
            if hasattr(handler, 'proxies'):
                proxy_map.update(handler.proxies)
        write_string(u'[debug] Proxy map: ' + compat_str(proxy_map) + u'\n')

    def _setup_opener(self, timeout=300):
        opts_cookiefile = self.params.get('cookiefile')
        opts_proxy = self.params.get('proxy')

        if opts_cookiefile is None:
            self.cookiejar = compat_cookiejar.CookieJar()
        else:
            self.cookiejar = compat_cookiejar.MozillaCookieJar(
                opts_cookiefile)
            if os.access(opts_cookiefile, os.R_OK):
                self.cookiejar.load()

        cookie_processor = compat_urllib_request.HTTPCookieProcessor(
            self.cookiejar)
        if opts_proxy is not None:
            if opts_proxy == '':
                proxies = {}
            else:
                proxies = {'http': opts_proxy, 'https': opts_proxy}
        else:
            proxies = compat_urllib_request.getproxies()
            # Set HTTPS proxy to HTTP one if given (https://github.com/rg3/youtube-dl/issues/805)
            if 'http' in proxies and 'https' not in proxies:
                proxies['https'] = proxies['http']
        proxy_handler = compat_urllib_request.ProxyHandler(proxies)
        https_handler = make_HTTPS_handler(
            self.params.get('nocheckcertificate', False))
        opener = compat_urllib_request.build_opener(
            https_handler, proxy_handler, cookie_processor, YoutubeDLHandler())
        # Delete the default user-agent header, which would otherwise apply in
        # cases where our custom HTTP handler doesn't come into play
        # (See https://github.com/rg3/youtube-dl/issues/1309 for details)
        opener.addheaders = []
        self._opener = opener

        # TODO remove this global modification
        compat_urllib_request.install_opener(opener)
        socket.setdefaulttimeout(timeout)
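
_setup_opener above is essentially the standard urllib recipe: a cookie processor, a proxy handler and a custom HTTPS handler chained into one opener, kept on the instance and, for now, also installed globally. A standalone sketch of the same wiring using the modern stdlib names rather than youtube-dl's compat_* aliases; the cookie path and proxy value are placeholders:

import os
import socket
import urllib.request
from http.cookiejar import CookieJar, MozillaCookieJar

def make_opener(cookiefile=None, proxy=None, timeout=300):
    # Cookie jar: a persistent Mozilla-format file if given, in-memory otherwise.
    if cookiefile is None:
        jar = CookieJar()
    else:
        jar = MozillaCookieJar(cookiefile)
        if os.access(cookiefile, os.R_OK):
            jar.load()

    # Proxy selection mirrors the diff: '' disables proxies, None falls back to
    # the environment, anything else is used for both http and https.
    if proxy is not None:
        proxies = {} if proxy == '' else {'http': proxy, 'https': proxy}
    else:
        proxies = urllib.request.getproxies()
        if 'http' in proxies and 'https' not in proxies:
            proxies['https'] = proxies['http']

    opener = urllib.request.build_opener(
        urllib.request.ProxyHandler(proxies),
        urllib.request.HTTPCookieProcessor(jar))
    opener.addheaders = []  # drop the default Python-urllib User-Agent
    socket.setdefaulttimeout(timeout)
    return opener, jar

opener, jar = make_opener(cookiefile='cookies.txt', proxy='')
# opener.open(req) is what YoutubeDL.urlopen() ultimately delegates to.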
@@ -41,45 +41,35 @@
__license__ = 'Public Domain'

import codecs
import collections
import getpass
import optparse
import os
import random
import re
import shlex
import socket
import subprocess
import sys
import traceback
import platform


from .utils import (
    compat_cookiejar,
    compat_print,
    compat_str,
    compat_urllib_request,
    DateRange,
    decodeOption,
    determine_ext,
    DownloadError,
    get_cachedir,
    make_HTTPS_handler,
    MaxDownloadsReached,
    platform_name,
    preferredencoding,
    SameFileError,
    std_headers,
    write_string,
    YoutubeDLHandler,
)
from .update import update_self
from .version import __version__
from .FileDownloader import (
    FileDownloader,
)
from .extractor import gen_extractors
from .version import __version__
from .YoutubeDL import YoutubeDL
from .PostProcessor import (
    FFmpegMetadataPP,
@@ -452,19 +442,6 @@ def _real_main(argv=None):

    parser, opts, args = parseOpts(argv)

    # Open appropriate CookieJar
    if opts.cookiefile is None:
        jar = compat_cookiejar.CookieJar()
    else:
        try:
            jar = compat_cookiejar.MozillaCookieJar(opts.cookiefile)
            if os.access(opts.cookiefile, os.R_OK):
                jar.load()
        except (IOError, OSError) as err:
            if opts.verbose:
                traceback.print_exc()
            write_string(u'ERROR: unable to open cookie file\n')
            sys.exit(101)
    # Set user agent
    if opts.user_agent is not None:
        std_headers['User-Agent'] = opts.user_agent

@@ -496,8 +473,6 @@ def _real_main(argv=None):
    all_urls = batchurls + args
    all_urls = [url.strip() for url in all_urls]

    opener = _setup_opener(jar=jar, opts=opts)

    extractors = gen_extractors()

    if opts.list_extractors:
@@ -552,7 +527,7 @@ def _real_main(argv=None):
    if opts.retries is not None:
        try:
            opts.retries = int(opts.retries)
-        except (TypeError, ValueError) as err:
+        except (TypeError, ValueError):
            parser.error(u'invalid retry count specified')
    if opts.buffersize is not None:
        numeric_buffersize = FileDownloader.parse_bytes(opts.buffersize)

@@ -563,13 +538,13 @@ def _real_main(argv=None):
        opts.playliststart = int(opts.playliststart)
        if opts.playliststart <= 0:
            raise ValueError(u'Playlist start must be positive')
-    except (TypeError, ValueError) as err:
+    except (TypeError, ValueError):
        parser.error(u'invalid playlist start number specified')
    try:
        opts.playlistend = int(opts.playlistend)
        if opts.playlistend != -1 and (opts.playlistend <= 0 or opts.playlistend < opts.playliststart):
            raise ValueError(u'Playlist end must be greater than playlist start')
-    except (TypeError, ValueError) as err:
+    except (TypeError, ValueError):
        parser.error(u'invalid playlist end number specified')
    if opts.extractaudio:
        if opts.audioformat not in ['best', 'aac', 'mp3', 'm4a', 'opus', 'vorbis', 'wav']:
@@ -672,34 +647,12 @@ def _real_main(argv=None):
        'youtube_print_sig_code': opts.youtube_print_sig_code,
        'age_limit': opts.age_limit,
        'download_archive': opts.download_archive,
        'cookiefile': opts.cookiefile,
        'nocheckcertificate': opts.no_check_certificate,
        }

    with YoutubeDL(ydl_opts) as ydl:
        if opts.verbose:
            write_string(u'[debug] youtube-dl version ' + __version__ + u'\n')
            try:
                sp = subprocess.Popen(
                    ['git', 'rev-parse', '--short', 'HEAD'],
                    stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                    cwd=os.path.dirname(os.path.abspath(__file__)))
                out, err = sp.communicate()
                out = out.decode().strip()
                if re.match('[0-9a-f]+', out):
                    write_string(u'[debug] Git HEAD: ' + out + u'\n')
            except:
                try:
                    sys.exc_clear()
                except:
                    pass
            write_string(u'[debug] Python version %s - %s' %
                         (platform.python_version(), platform_name()) + u'\n')

            proxy_map = {}
            for handler in opener.handlers:
                if hasattr(handler, 'proxies'):
                    proxy_map.update(handler.proxies)
            write_string(u'[debug] Proxy map: ' + compat_str(proxy_map) + u'\n')

        ydl.print_debug_header()
        ydl.add_default_info_extractors()

        # PostProcessors
@@ -730,46 +683,9 @@ def _real_main(argv=None):
            ydl.to_screen(u'--max-download limit reached, aborting.')
            retcode = 101

    # Dump cookie jar if requested
    if opts.cookiefile is not None:
        try:
            jar.save()
        except (IOError, OSError):
            sys.exit(u'ERROR: unable to save cookie jar')

    sys.exit(retcode)


def _setup_opener(jar=None, opts=None, timeout=300):
    if opts is None:
        FakeOptions = collections.namedtuple(
            'FakeOptions', ['proxy', 'no_check_certificate'])
        opts = FakeOptions(proxy=None, no_check_certificate=False)

    cookie_processor = compat_urllib_request.HTTPCookieProcessor(jar)
    if opts.proxy is not None:
        if opts.proxy == '':
            proxies = {}
        else:
            proxies = {'http': opts.proxy, 'https': opts.proxy}
    else:
        proxies = compat_urllib_request.getproxies()
        # Set HTTPS proxy to HTTP one if given (https://github.com/rg3/youtube-dl/issues/805)
        if 'http' in proxies and 'https' not in proxies:
            proxies['https'] = proxies['http']
    proxy_handler = compat_urllib_request.ProxyHandler(proxies)
    https_handler = make_HTTPS_handler(opts)
    opener = compat_urllib_request.build_opener(
        https_handler, proxy_handler, cookie_processor, YoutubeDLHandler())
    # Delete the default user-agent header, which would otherwise apply in
    # cases where our custom HTTP handler doesn't come into play
    # (See https://github.com/rg3/youtube-dl/issues/1309 for details)
    opener.addheaders = []
    compat_urllib_request.install_opener(opener)
    socket.setdefaulttimeout(timeout)
    return opener


def main(argv=None):
    try:
        _real_main(argv)
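
Note that the "Dump cookie jar if requested" block removed here is not lost: saving now happens in YoutubeDL.__exit__ (first file), driven by the ordinary context-manager protocol. A minimal standalone illustration of that pattern, with a made-up class name and cookie path:

from http.cookiejar import MozillaCookieJar

class CookieSession(object):
    """Load a Mozilla-format cookie file on entry, save it on exit."""

    def __init__(self, cookiefile):
        self.cookiejar = MozillaCookieJar(cookiefile)

    def __enter__(self):
        try:
            self.cookiejar.load()
        except OSError:
            pass  # first run: the file does not exist yet
        return self

    def __exit__(self, *args):
        self.cookiejar.save()

with CookieSession('cookies.txt') as session:
    pass  # perform downloads; cookies are written back when the block ends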
@@ -9,7 +9,6 @@
from ..utils import (
    compat_http_client,
    compat_urllib_error,
    compat_urllib_request,
    compat_str,

    clean_html,

@@ -20,6 +19,7 @@
    unescapeHTML,
)


class InfoExtractor(object):
    """Information Extractor class.

@@ -158,7 +158,7 @@ def _request_webpage(self, url_or_request, video_id, note=None, errnote=None):
        elif note is not False:
            self.to_screen(u'%s: %s' % (video_id, note))
        try:
-            return compat_urllib_request.urlopen(url_or_request)
+            return self._downloader.urlopen(url_or_request)
        except (compat_urllib_error.URLError, compat_http_client.HTTPException, socket.error) as err:
            if errnote is None:
                errnote = u'Unable to download webpage'
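
The hunk above is from the InfoExtractor base class: instead of hitting the globally installed opener via compat_urllib_request.urlopen, extractors now route requests through the downloader they are attached to, so per-instance cookies, proxies and SSL settings apply. A tiny stand-in showing that delegation; all names except urlopen() are hypothetical:

import urllib.request

class FakeDownloader(object):
    """Stand-in for YoutubeDL: owns an opener and exposes urlopen()."""

    def __init__(self):
        self._opener = urllib.request.build_opener()

    def urlopen(self, req):
        return self._opener.open(req)

class FakeExtractor(object):
    def __init__(self, downloader):
        self._downloader = downloader

    def _request_webpage(self, url):
        # Mirrors the new code path: no global opener involved.
        return self._downloader.urlopen(url)

extractor = FakeExtractor(FakeDownloader())
# extractor._request_webpage('http://example.com/') now goes through the
# downloader's opener, and therefore its cookies, proxy and SSL settings.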
@@ -538,8 +538,7 @@ def formatSeconds(secs):
    else:
        return '%d' % secs


-def make_HTTPS_handler(opts):
+def make_HTTPS_handler(opts_no_check_certificate):
    if sys.version_info < (3, 2):
        import httplib

@@ -566,7 +565,7 @@ def https_open(self, req):
        context.set_default_verify_paths()

        context.verify_mode = (ssl.CERT_NONE
-                               if opts.no_check_certificate
+                               if opts_no_check_certificate
                               else ssl.CERT_REQUIRED)
        return compat_urllib_request.HTTPSHandler(context=context)
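
make_HTTPS_handler now takes a plain boolean instead of the whole options object, which is what lets YoutubeDL._setup_opener call it with params.get('nocheckcertificate', False). A rough standalone sketch of an equivalent handler on Python 3, using the stdlib ssl/urllib modules rather than the compat layer; the function name is a stand-in, not the real utils helper:

import ssl
import urllib.request

def make_https_handler(no_check_certificate):
    # Build an SSL context that either verifies server certificates against
    # the default CA paths or skips verification entirely.
    context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
    if no_check_certificate:
        context.check_hostname = False
        context.verify_mode = ssl.CERT_NONE
    else:
        context.set_default_verify_paths()
        context.verify_mode = ssl.CERT_REQUIRED
    return urllib.request.HTTPSHandler(context=context)

opener = urllib.request.build_opener(make_https_handler(no_check_certificate=False))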