Mirror of https://github.com/yt-dlp/yt-dlp.git (synced 2024-12-28 01:41:06 +00:00)
[cleanup] Misc cleanup
commit 2414649192 (parent 5d5c0f7e99)
22 changed files with 79 additions and 103 deletions
.github/ISSUE_TEMPLATE/1_broken_site.yml (vendored, 2 lines changed)
@@ -13,7 +13,7 @@ body:
         required: true
       - label: I've verified that I'm running yt-dlp version **2022.04.08** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
         required: true
-      - label: I've checked that all provided URLs are alive and playable in a browser
+      - label: I've checked that all provided URLs are playable in a browser with the same IP and same login details
         required: true
      - label: I've checked that all URLs and arguments with special characters are [properly quoted or escaped](https://github.com/ytdl-org/youtube-dl#video-url-contains-an-ampersand-and-im-getting-some-strange-output-1-2839-or-v-is-not-recognized-as-an-internal-or-external-command)
         required: true
@@ -13,7 +13,7 @@ body:
         required: true
       - label: I've verified that I'm running yt-dlp version **2022.04.08** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
         required: true
-      - label: I've checked that all provided URLs are alive and playable in a browser
+      - label: I've checked that all provided URLs are playable in a browser with the same IP and same login details
         required: true
      - label: I've checked that none of provided URLs [violate any copyrights](https://github.com/ytdl-org/youtube-dl#can-you-add-support-for-this-anime-video-site-or-site-which-shows-current-movies-for-free) or contain any [DRM](https://en.wikipedia.org/wiki/Digital_rights_management) to the best of my knowledge
         required: true
@@ -13,7 +13,7 @@ body:
         required: true
       - label: I've verified that I'm running yt-dlp version **2022.04.08** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
         required: true
-      - label: I've checked that all provided URLs are alive and playable in a browser
+      - label: I've checked that all provided URLs are playable in a browser with the same IP and same login details
         required: true
      - label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues including closed ones. DO NOT post duplicates
         required: true
.github/ISSUE_TEMPLATE/4_bug_report.yml (vendored, 2 lines changed)
@@ -13,7 +13,7 @@ body:
         required: true
       - label: I've verified that I'm running yt-dlp version **2022.04.08** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
         required: true
-      - label: I've checked that all provided URLs are alive and playable in a browser
+      - label: I've checked that all provided URLs are playable in a browser with the same IP and same login details
         required: true
      - label: I've checked that all URLs and arguments with special characters are [properly quoted or escaped](https://github.com/ytdl-org/youtube-dl#video-url-contains-an-ampersand-and-im-getting-some-strange-output-1-2839-or-v-is-not-recognized-as-an-internal-or-external-command)
         required: true
@@ -13,7 +13,7 @@ body:
         required: true
       - label: I've verified that I'm running yt-dlp version **%(version)s** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
         required: true
-      - label: I've checked that all provided URLs are alive and playable in a browser
+      - label: I've checked that all provided URLs are playable in a browser with the same IP and same login details
         required: true
      - label: I've checked that all URLs and arguments with special characters are [properly quoted or escaped](https://github.com/ytdl-org/youtube-dl#video-url-contains-an-ampersand-and-im-getting-some-strange-output-1-2839-or-v-is-not-recognized-as-an-internal-or-external-command)
         required: true
@@ -13,7 +13,7 @@ body:
         required: true
       - label: I've verified that I'm running yt-dlp version **%(version)s** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
         required: true
-      - label: I've checked that all provided URLs are alive and playable in a browser
+      - label: I've checked that all provided URLs are playable in a browser with the same IP and same login details
         required: true
      - label: I've checked that none of provided URLs [violate any copyrights](https://github.com/ytdl-org/youtube-dl#can-you-add-support-for-this-anime-video-site-or-site-which-shows-current-movies-for-free) or contain any [DRM](https://en.wikipedia.org/wiki/Digital_rights_management) to the best of my knowledge
         required: true
@@ -13,7 +13,7 @@ body:
         required: true
       - label: I've verified that I'm running yt-dlp version **%(version)s** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
         required: true
-      - label: I've checked that all provided URLs are alive and playable in a browser
+      - label: I've checked that all provided URLs are playable in a browser with the same IP and same login details
         required: true
      - label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues including closed ones. DO NOT post duplicates
         required: true
.github/ISSUE_TEMPLATE_tmpl/4_bug_report.yml (vendored, 2 lines changed)
@@ -13,7 +13,7 @@ body:
         required: true
       - label: I've verified that I'm running yt-dlp version **%(version)s** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
         required: true
-      - label: I've checked that all provided URLs are alive and playable in a browser
+      - label: I've checked that all provided URLs are playable in a browser with the same IP and same login details
         required: true
      - label: I've checked that all URLs and arguments with special characters are [properly quoted or escaped](https://github.com/ytdl-org/youtube-dl#video-url-contains-an-ampersand-and-im-getting-some-strange-output-1-2839-or-v-is-not-recognized-as-an-internal-or-external-command)
         required: true
@@ -166,7 +166,8 @@ ### Using the release binary
 
 <!-- MANPAGE: BEGIN EXCLUDED SECTION -->
 [![Windows](https://img.shields.io/badge/-Windows_x64-blue.svg?style=for-the-badge&logo=windows)](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp.exe)
-[![Linux](https://img.shields.io/badge/-Linux/MacOS/BSD-red.svg?style=for-the-badge&logo=linux)](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp)
+[![Linux](https://img.shields.io/badge/-Linux/BSD-red.svg?style=for-the-badge&logo=linux)](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp)
+[![MacOS](https://img.shields.io/badge/-MacOS-lightblue.svg?style=for-the-badge&logo=apple)](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_macos)
 [![Source Tarball](https://img.shields.io/badge/-Source_tar-green.svg?style=for-the-badge)](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp.tar.gz)
 [![Other variants](https://img.shields.io/badge/-Other-grey.svg?style=for-the-badge)](#release-files)
 [![All versions](https://img.shields.io/badge/-All_Versions-lightgrey.svg?style=for-the-badge)](https://github.com/yt-dlp/yt-dlp/releases)
@@ -244,7 +245,7 @@ #### Alternatives
 File|Description
 :---|:---
 [yt-dlp_x86.exe](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_x86.exe)|Windows (Vista SP2+) standalone x86 (32-bit) binary
-[yt-dlp_min.exe](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_min.exe)|Windows (Win7 SP1+) standalone x64 binary built with `py2exe`.<br/> Does not contain `pycryptodomex`, needs VC++14
+[yt-dlp_min.exe](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_min.exe)|Windows (Win7 SP1+) standalone x64 binary built with `py2exe`.<br/> Does not contain `pycryptodomex`/`certifi`, needs VC++14
 [yt-dlp_win.zip](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_win.zip)|Unpackaged Windows executable (no auto-update)
 [yt-dlp_macos.zip](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_macos.zip)|Unpackaged MacOS (10.15+) executable (no auto-update)
@@ -2,13 +2,18 @@
 import random
 import re
 
-from ..utils import bug_reports_message, classproperty, write_string
+from ..utils import (
+    age_restricted,
+    bug_reports_message,
+    classproperty,
+    write_string,
+)
 
 
 class LazyLoadMetaClass(type):
     def __getattr__(cls, name):
-        # "is_suitable" requires "_TESTS". However, they bloat the lazy_extractors
-        if '_real_class' not in cls.__dict__ and name not in ('is_suitable', 'get_testcases'):
+        # "_TESTS" bloat the lazy_extractors
+        if '_real_class' not in cls.__dict__ and name != 'get_testcases':
             write_string(
                 'WARNING: Falling back to normal extractor since lazy extractor '
                 f'{cls.__name__} does not have attribute {name}{bug_reports_message()}\n')
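For context on the fallback being tightened here: the metaclass's __getattr__ only fires for attributes missing from a generated stub, warning before resolving them from the real class. A toy version of that pattern (class names and the loading mechanism are illustrative, not the generated yt-dlp code):

    class LazyMeta(type):
        def __getattr__(cls, name):
            # only reached when `name` is not defined on the stub itself
            if name != 'get_testcases':
                print(f'WARNING: falling back to the real extractor for {cls.__name__}.{name}')
            return getattr(cls._load_real_class(), name)

    class StubExample(metaclass=LazyMeta):
        age_limit = 0  # baked in at generation time, so no fallback is needed for it

        @classmethod
        def _load_real_class(cls):
            # stand-in for however the generated module imports the real extractor
            from json import JSONDecoder
            return JSONDecoder

    StubExample.age_limit   # served by the stub, no warning
    StubExample.decode      # missing from the stub: warns, then resolves via the real class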
@@ -8,9 +8,9 @@
 
 
 NO_ATTR = object()
-STATIC_CLASS_PROPERTIES = ['IE_NAME', 'IE_DESC', 'SEARCH_KEY', '_WORKING', '_NETRC_MACHINE']
+STATIC_CLASS_PROPERTIES = ['IE_NAME', 'IE_DESC', 'SEARCH_KEY', '_WORKING', '_NETRC_MACHINE', 'age_limit']
 CLASS_METHODS = [
-    'ie_key', 'working', 'description', 'suitable', '_match_valid_url', '_match_id', 'get_temp_id',
+    'ie_key', 'working', 'description', 'suitable', '_match_valid_url', '_match_id', 'get_temp_id', 'is_suitable'
 ]
 IE_TEMPLATE = '''
 class {name}({bases}):
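With age_limit added to STATIC_CLASS_PROPERTIES and is_suitable added to CLASS_METHODS, the generator copies the value into each stub so suitability checks no longer need the real extractor. A toy sketch of how static properties might be baked into a generated class (names and template are illustrative, not the real make_lazy_extractors logic):

    STATIC_CLASS_PROPERTIES = ['IE_NAME', 'age_limit']
    IE_TEMPLATE = 'class {name}(LazyLoadExtractor):\n{props}'

    def build_stub(real_cls):
        # render each static property as a literal assignment in the stub's source
        props = ''.join(
            f'    {prop} = {getattr(real_cls, prop)!r}\n'
            for prop in STATIC_CLASS_PROPERTIES)
        return IE_TEMPLATE.format(name=real_cls.__name__, props=props)

    class ExampleIE:
        IE_NAME = 'example'
        age_limit = 18

    print(build_stub(ExampleIE))
    # class ExampleIE(LazyLoadExtractor):
    #     IE_NAME = 'example'
    #     age_limit = 18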
@@ -140,7 +140,7 @@ def do_GET(self):
             self.send_response(200)
             self.send_header('Content-Type', 'text/plain; charset=utf-8')
             self.end_headers()
-            self.wfile.write('{self.proxy_name}: {self.path}'.format(self=self).encode())
+            self.wfile.write(f'{self.proxy_name}: {self.path}'.encode())
     return HTTPTestRequestHandler
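The only change here is moving from str.format(self=self) to an f-string; both render the same bytes. A quick equivalence check (Handler is a stand-in for the test's request handler):

    class Handler:
        proxy_name = 'geo'
        path = '/video'

        def line_old(self):
            return '{self.proxy_name}: {self.path}'.format(self=self).encode()

        def line_new(self):
            return f'{self.proxy_name}: {self.path}'.encode()

    assert Handler().line_old() == Handler().line_new() == b'geo: /video'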
@@ -1322,7 +1322,6 @@ def check_filter():
                            return None
                        elif reply == 'n':
                            return f'Skipping {video_title}'
-                   return True
                elif ret is not None:
                    return ret
            return None
@@ -3191,18 +3190,18 @@ def ffmpeg_fixup(cndn, msg, cls):
                              FFmpegFixupM4aPP)
 
             downloader = get_suitable_downloader(info_dict, self.params) if 'protocol' in info_dict else None
-            downloader = downloader.__name__ if downloader else None
+            downloader = downloader.FD_NAME if downloader else None
 
             if info_dict.get('requested_formats') is None:  # Not necessary if doing merger
-                ffmpeg_fixup(downloader == 'HlsFD' and not self.params.get('hls_use_mpegts')
+                ffmpeg_fixup(downloader == 'hlsnative' and not self.params.get('hls_use_mpegts')
                              or info_dict.get('is_live') and self.params.get('hls_use_mpegts') is None,
                              'Possible MPEG-TS in MP4 container or malformed AAC timestamps',
                              FFmpegFixupM3u8PP)
                 ffmpeg_fixup(info_dict.get('is_live') and downloader == 'DashSegmentsFD',
                              'Possible duplicate MOOV atoms', FFmpegFixupDuplicateMoovPP)
 
-            ffmpeg_fixup(downloader == 'WebSocketFragmentFD', 'Malformed timestamps detected', FFmpegFixupTimestampPP)
-            ffmpeg_fixup(downloader == 'WebSocketFragmentFD', 'Malformed duration detected', FFmpegFixupDurationPP)
+            ffmpeg_fixup(downloader == 'web_socket_fragment', 'Malformed timestamps detected', FFmpegFixupTimestampPP)
+            ffmpeg_fixup(downloader == 'web_socket_fragment', 'Malformed duration detected', FFmpegFixupDurationPP)
 
             fixup()
             try:
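The fixup conditions now key off a downloader's FD_NAME identifier ('hlsnative', 'web_socket_fragment') instead of its Python class name ('HlsFD', 'WebSocketFragmentFD'). A minimal sketch of why comparing a dedicated name attribute is more robust than __name__ (the classes below are illustrative, not the real yt-dlp downloaders):

    class FileDownloader:
        FD_NAME = 'generic'

    class HlsFD(FileDownloader):
        # the public identifier need not match the class name
        FD_NAME = 'hlsnative'

    def needs_m3u8_fixup(downloader_cls):
        name = downloader_cls.FD_NAME if downloader_cls else None
        return name == 'hlsnative'

    assert needs_m3u8_fixup(HlsFD)
    assert not needs_m3u8_fixup(None)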
@@ -282,7 +282,7 @@ def _extract_chrome_cookies(browser_name, profile, keyring, logger):
     else:
         failed_message = ''
     logger.info(f'Extracted {len(jar)} cookies from {browser_name}{failed_message}')
-    counts = decryptor.cookie_counts.copy()
+    counts = decryptor._cookie_counts.copy()
     counts['unencrypted'] = unencrypted_cookies
     logger.debug(f'cookie version breakdown: {counts}')
     return jar
@@ -340,7 +340,7 @@ def decrypt(self, encrypted_value):
         raise NotImplementedError('Must be implemented by sub classes')
 
     @property
-    def cookie_counts(self):
+    def _cookie_counts(self):
         raise NotImplementedError('Must be implemented by sub classes')
@@ -369,10 +369,6 @@ def derive_key(password):
         # https://chromium.googlesource.com/chromium/src/+/refs/heads/main/components/os_crypt/os_crypt_linux.cc
         return pbkdf2_sha1(password, salt=b'saltysalt', iterations=1, key_length=16)
 
-    @property
-    def cookie_counts(self):
-        return self._cookie_counts
-
     def decrypt(self, encrypted_value):
         version = encrypted_value[:3]
         ciphertext = encrypted_value[3:]
@@ -406,10 +402,6 @@ def derive_key(password):
         # https://chromium.googlesource.com/chromium/src/+/refs/heads/main/components/os_crypt/os_crypt_mac.mm
         return pbkdf2_sha1(password, salt=b'saltysalt', iterations=1003, key_length=16)
 
-    @property
-    def cookie_counts(self):
-        return self._cookie_counts
-
     def decrypt(self, encrypted_value):
         version = encrypted_value[:3]
         ciphertext = encrypted_value[3:]
@@ -435,10 +427,6 @@ def __init__(self, browser_root, logger):
         self._v10_key = _get_windows_v10_key(browser_root, logger)
         self._cookie_counts = {'v10': 0, 'other': 0}
 
-    @property
-    def cookie_counts(self):
-        return self._cookie_counts
-
     def decrypt(self, encrypted_value):
         version = encrypted_value[:3]
         ciphertext = encrypted_value[3:]
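Each decryptor previously wrapped its counter dict in an identical read-only cookie_counts property; callers now read the underscored attribute directly. The pattern in miniature (simplified classes, not the full decryptors):

    # Before: a trivial property repeated in every subclass
    class DecryptorBefore:
        def __init__(self):
            self._cookie_counts = {'v10': 0, 'other': 0}

        @property
        def cookie_counts(self):
            return self._cookie_counts

    # After: the attribute itself is the interface
    class DecryptorAfter:
        def __init__(self):
            self._cookie_counts = {'v10': 0, 'other': 0}

    counts = DecryptorAfter()._cookie_counts.copy()
    counts['unencrypted'] = 3
    assert counts == {'v10': 0, 'other': 0, 'unencrypted': 3}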
@@ -53,11 +53,6 @@ def _real_initialize(self):
 class AnimeLabIE(AnimeLabBaseIE):
     _VALID_URL = r'https?://(?:www\.)?animelab\.com/player/(?P<id>[^/]+)'
 
-    # the following tests require authentication, but a free account will suffice
-    # just set 'usenetrc' to true in test/local_parameters.json if you use a .netrc file
-    # or you can set 'username' and 'password' there
-    # the tests also select a specific format so that the same video is downloaded
-    # regardless of whether the user is premium or not (needs testing on a premium account)
     _TEST = {
         'url': 'https://www.animelab.com/player/fullmetal-alchemist-brotherhood-episode-42',
         'md5': '05bde4b91a5d1ff46ef5b94df05b0f7f',
@@ -76,9 +71,9 @@ class AnimeLabIE(AnimeLabBaseIE):
             'season_id': '38',
         },
         'params': {
+            # Ensure the same video is downloaded whether the user is premium or not
            'format': '[format_id=21711_yeshardsubbed_ja-JP][height=480]',
         },
+        'skip': 'All AnimeLab content requires authentication',
     }
 
     def _real_extract(self, url):
@@ -3639,20 +3639,17 @@ def get_testcases(cls, include_onlymatching=False):
             t['name'] = cls.ie_key()
             yield t
 
+    @classproperty
+    def age_limit(cls):
+        """Get age limit from the testcases"""
+        return max(traverse_obj(
+            tuple(cls.get_testcases(include_onlymatching=False)),
+            (..., (('playlist', 0), None), 'info_dict', 'age_limit')) or [0])
+
     @classmethod
     def is_suitable(cls, age_limit):
-        """ Test whether the extractor is generally suitable for the given
-        age limit (i.e. pornographic sites are not, all others usually are) """
-
-        any_restricted = False
-        for tc in cls.get_testcases(include_onlymatching=False):
-            if tc.get('playlist', []):
-                tc = tc['playlist'][0]
-            is_restricted = age_restricted(tc.get('info_dict', {}).get('age_limit'), age_limit)
-            if not is_restricted:
-                return True
-            any_restricted = any_restricted or is_restricted
-        return not any_restricted
+        """Test whether the extractor is generally suitable for the given age limit"""
+        return not age_restricted(cls.age_limit, age_limit)
 
     @classmethod
     def description(cls, *, markdown=True, search_examples=None):
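The new age_limit classproperty is the maximum age_limit declared across an extractor's test cases (checking both the test itself and, when present, the first playlist entry), and is_suitable reduces to a single age_restricted check. A rough equivalent without traverse_obj, where age_restricted is a simplified stand-in for the yt_dlp.utils helper:

    def age_restricted(content_limit, age_limit):
        # simplified: block iff the caller declared a limit lower than the content's
        if age_limit is None or content_limit is None:
            return False
        return age_limit < content_limit

    def age_limit_from_testcases(testcases):
        limits = []
        for tc in testcases:
            for candidate in filter(None, [tc, (tc.get('playlist') or [None])[0]]):
                limit = (candidate.get('info_dict') or {}).get('age_limit')
                if limit is not None:
                    limits.append(limit)
        return max(limits or [0])

    tests = [{'info_dict': {'age_limit': 18}}, {'playlist': [{'info_dict': {}}]}]
    assert age_limit_from_testcases(tests) == 18
    assert not age_restricted(18, 18)   # is_suitable(18) -> True
    assert age_restricted(18, 0)        # is_suitable(0) -> False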
@@ -3745,11 +3742,15 @@ def extract_automatic_captions(self, *args, **kwargs):
     def _get_automatic_captions(self, *args, **kwargs):
         raise NotImplementedError('This method must be implemented by subclasses')
 
+    @property
+    def _cookies_passed(self):
+        """Whether cookies have been passed to YoutubeDL"""
+        return self.get_param('cookiefile') is not None or self.get_param('cookiesfrombrowser') is not None
+
     def mark_watched(self, *args, **kwargs):
         if not self.get_param('mark_watched', False):
             return
-        if (self.supports_login() and self._get_login_info()[0] is not None
-                or self.get_param('cookiefile') or self.get_param('cookiesfrombrowser')):
+        if self.supports_login() and self._get_login_info()[0] is not None or self._cookies_passed:
             self._mark_watched(*args, **kwargs)
 
     def _mark_watched(self, *args, **kwargs):
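_cookies_passed centralizes a check that several extractors previously spelled out against both cookie parameters; the later hunks that simplify _check_login_required and _real_initialize switch to it. A minimal sketch of the property against a stub get_param (the stub replaces the real InfoExtractor plumbing):

    class ExtractorSketch:
        def __init__(self, params):
            self._params = params

        def get_param(self, name, default=None):
            return self._params.get(name, default)

        @property
        def _cookies_passed(self):
            """Whether cookies have been passed to YoutubeDL"""
            return self.get_param('cookiefile') is not None or self.get_param('cookiesfrombrowser') is not None

    assert ExtractorSketch({'cookiefile': 'cookies.txt'})._cookies_passed
    assert not ExtractorSketch({})._cookies_passed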
@@ -4106,7 +4106,7 @@ def filter_video(urls):
             entries.append(entry_info_dict)
 
         if len(entries) == 1:
-            return entries[0]
+            return merge_dicts(entries[0], info_dict)
         else:
             for num, e in enumerate(entries, start=1):
                 # 'url' results don't have a title
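Returning merge_dicts(entries[0], info_dict) lets a single extracted entry inherit whatever metadata was already gathered for the page. A simplified merge illustrating the intent (the real merge_dicts lives in yt_dlp.utils and has slightly richer rules):

    def merge_dicts_simplified(*dicts):
        # earlier dicts win; later dicts only fill keys that are still missing or empty
        merged = {}
        for d in dicts:
            for key, value in d.items():
                if value is not None and merged.get(key) in (None, ''):
                    merged[key] = value
        return merged

    entry = {'id': 'abc', 'url': 'https://example.com/v.mp4', 'title': ''}
    page_info = {'id': 'page', 'title': 'Page title'}
    merged = merge_dicts_simplified(entry, page_info)
    assert merged['id'] == 'abc' and merged['title'] == 'Page title'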
@@ -119,7 +119,7 @@ def _vimeo_sort_formats(self, formats):
 
     def _parse_config(self, config, video_id):
         video_data = config['video']
-        video_title = video_data['title']
+        video_title = video_data.get('title')
         live_event = video_data.get('live_event') or {}
         is_live = live_event.get('status') == 'started'
         request = config.get('request') or {}
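Switching to video_data.get('title') means a config without a title yields None instead of raising:

    video_data = {'live_event': {'status': 'started'}}
    # old: video_data['title'] would raise KeyError
    assert video_data.get('title') is None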
@@ -590,7 +590,6 @@ class VKWallPostIE(VKBaseIE):
         }],
         'params': {
             'skip_download': True,
-            'usenetrc': True,
         },
         'skip': 'Requires vk account credentials',
     }, {
@@ -601,9 +600,6 @@ class VKWallPostIE(VKBaseIE):
             'title': 'Сергей Горбунов - Wall post 85155021_6319',
         },
         'playlist_count': 1,
-        'params': {
-            'usenetrc': True,
-        },
         'skip': 'Requires vk account credentials',
     }, {
         # wall page URL
@@ -394,9 +394,7 @@ def _real_initialize(self):
         self._check_login_required()
 
     def _check_login_required(self):
-        if (self._LOGIN_REQUIRED
-                and self.get_param('cookiefile') is None
-                and self.get_param('cookiesfrombrowser') is None):
+        if self._LOGIN_REQUIRED and not self._cookies_passed:
             self.raise_login_required('Login details are needed to download this content', method='cookies')
 
     _YT_INITIAL_DATA_RE = r'(?:window\s*\[\s*["\']ytInitialData["\']\s*\]|ytInitialData)\s*=\s*({.+?})\s*;'
@@ -4282,8 +4280,7 @@ def _extract_inline_playlist(self, playlist, playlist_id, data, ytcfg):
             start = next((i for i, v in enumerate(videos) if v['id'] == last_id), -1) + 1
             if start >= len(videos):
                 return
-            for video in videos[start:]:
-                yield video
+            yield from videos[start:]
             first_id = first_id or videos[0]['id']
             last_id = videos[-1]['id']
             watch_endpoint = try_get(
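yield from delegates to the slice directly and is behaviourally identical to the removed loop:

    def explicit(videos, start):
        for video in videos[start:]:
            yield video

    def delegated(videos, start):
        yield from videos[start:]

    assert list(explicit([1, 2, 3], 1)) == list(delegated([1, 2, 3], 1)) == [2, 3]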
@@ -59,7 +59,7 @@ def _call_api(self, url_type, params, display_id=None, **kwargs):
         return (resp or {}).get('data') or {}
 
     def _real_initialize(self):
-        if not self.get_param('cookiefile') and not self.get_param('cookiesfrombrowser'):
+        if not self._cookies_passed:
             self._request_webpage(
                 self._api_url('bai-hat', {'id': ''}), None, note='Updating cookies')
@@ -1445,6 +1445,10 @@ def __init__(self, filename=None, *args, **kwargs):
             filename = os.fspath(filename)
         self.filename = filename
 
+    @staticmethod
+    def _true_or_false(cndn):
+        return 'TRUE' if cndn else 'FALSE'
+
     @staticmethod
     def is_path(file):
         return isinstance(file, (str, bytes, os.PathLike))
@@ -1459,57 +1463,47 @@ def open(self, file, *, write=False):
                 file.truncate(0)
             yield file
 
-    def save(self, filename=None, ignore_discard=False, ignore_expires=False):
+    def _really_save(self, f, ignore_discard=False, ignore_expires=False):
+        now = time.time()
+        for cookie in self:
+            if (not ignore_discard and cookie.discard
+                    or not ignore_expires and cookie.is_expired(now)):
+                continue
+            name, value = cookie.name, cookie.value
+            if value is None:
+                # cookies.txt regards 'Set-Cookie: foo' as a cookie
+                # with no name, whereas http.cookiejar regards it as a
+                # cookie with no value.
+                name, value = '', name
+            f.write('%s\n' % '\t'.join((
+                cookie.domain,
+                self._true_or_false(cookie.domain.startswith('.')),
+                cookie.path,
+                self._true_or_false(cookie.secure),
+                str_or_none(cookie.expires, default=''),
+                name, value
+            )))
+
+    def save(self, filename=None, *args, **kwargs):
         """
         Save cookies to a file.
+        Code is taken from CPython 3.6
+        https://github.com/python/cpython/blob/8d999cbf4adea053be6dbb612b9844635c4dfb8e/Lib/http/cookiejar.py#L2091-L2117 """
 
-        Most of the code is taken from CPython 3.8 and slightly adapted
-        to support cookie files with UTF-8 in both python 2 and 3.
-        """
         if filename is None:
             if self.filename is not None:
                 filename = self.filename
             else:
                 raise ValueError(compat_cookiejar.MISSING_FILENAME_TEXT)
 
-        # Store session cookies with `expires` set to 0 instead of an empty
-        # string
+        # Store session cookies with `expires` set to 0 instead of an empty string
        for cookie in self:
             if cookie.expires is None:
                 cookie.expires = 0
 
         with self.open(filename, write=True) as f:
             f.write(self._HEADER)
-            now = time.time()
-            for cookie in self:
-                if not ignore_discard and cookie.discard:
-                    continue
-                if not ignore_expires and cookie.is_expired(now):
-                    continue
-                if cookie.secure:
-                    secure = 'TRUE'
-                else:
-                    secure = 'FALSE'
-                if cookie.domain.startswith('.'):
-                    initial_dot = 'TRUE'
-                else:
-                    initial_dot = 'FALSE'
-                if cookie.expires is not None:
-                    expires = compat_str(cookie.expires)
-                else:
-                    expires = ''
-                if cookie.value is None:
-                    # cookies.txt regards 'Set-Cookie: foo' as a cookie
-                    # with no name, whereas http.cookiejar regards it as a
-                    # cookie with no value.
-                    name = ''
-                    value = cookie.name
-                else:
-                    name = cookie.name
-                    value = cookie.value
-                f.write(
-                    '\t'.join([cookie.domain, initial_dot, cookie.path,
-                               secure, expires, name, value]) + '\n')
+            self._really_save(f, *args, **kwargs)
 
     def load(self, filename=None, ignore_discard=False, ignore_expires=False):
         """Load cookies from a file."""
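The extracted _really_save writes one tab-separated Netscape cookies.txt row per cookie: domain, include-subdomains flag, path, secure flag, expiry, name, value. A standalone sketch of that row format, using a plain dict in place of an http.cookiejar.Cookie:

    def true_or_false(cndn):
        return 'TRUE' if cndn else 'FALSE'

    def cookie_row(cookie):
        # session cookies are stored with `expires` set to 0 by save() before writing
        expires = '' if cookie['expires'] is None else str(cookie['expires'])
        return '\t'.join((
            cookie['domain'],
            true_or_false(cookie['domain'].startswith('.')),
            cookie['path'],
            true_or_false(cookie['secure']),
            expires,
            cookie['name'],
            cookie['value'],
        ))

    row = cookie_row({'domain': '.example.com', 'path': '/', 'secure': True,
                      'expires': 0, 'name': 'sid', 'value': 'abc'})
    assert row == '.example.com\tTRUE\t/\tTRUE\t0\tsid\tabc'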