mirror of https://github.com/yt-dlp/yt-dlp.git (synced 2025-10-03 10:13:45 -04:00)

Preparing for release
@@ -20,12 +20,14 @@ from ..utils import (
    ExtractorError,
    float_or_none,
    HEADRequest,
    int_or_none,
    is_html,
    js_to_json,
    KNOWN_EXTENSIONS,
    merge_dicts,
    mimetype2ext,
    orderedSet,
    parse_duration,
    sanitized_Request,
    smuggle_url,
    unescapeHTML,
@@ -35,6 +37,7 @@ from ..utils import (
    url_or_none,
    xpath_attr,
    xpath_text,
    xpath_with_ns,
)
from .commonprotocols import RtmpIE
from .brightcove import (

@@ -2,7 +2,6 @@
from __future__ import unicode_literals

import json
import re

from .common import InfoExtractor
from .brightcove import BrightcoveNewIE

@@ -1,6 +1,5 @@
# coding: utf-8
from __future__ import unicode_literals
import json

from .telecinco import TelecincoIE
from ..utils import (

@@ -324,7 +324,7 @@ def _make_video_result(node):
    return {
        '_type': 'url_transparent',
        'ie_key': TwitchVodIE.ie_key(),
-        'id': 'v'+ video_id,
+        'id': 'v' + video_id,
        'url': 'https://www.twitch.tv/videos/%s' % video_id,
        'title': node.get('title'),
        'thumbnail': node.get('previewThumbnailURL'),

@@ -47,6 +47,7 @@ class WDRIE(InfoExtractor):
        media_resource = metadata['mediaResource']

        formats = []
+        subtitles = {}

        # check if the metadata contains a direct URL to a file
        for kind, media in media_resource.items():
@@ -93,7 +94,6 @@ class WDRIE(InfoExtractor):

        self._sort_formats(formats)

-        subtitles = {}
        caption_url = media_resource.get('captionURL')
        if caption_url:
            subtitles['de'] = [{

@@ -3342,7 +3342,7 @@ class YoutubeTabIE(YoutubeBaseInfoExtractor):
        if is_home is not None and is_home.group('not_channel') is None and item_id != 'feed':
            self._downloader.report_warning(
                'A channel/user page was given. All the channel\'s videos will be downloaded. '
-                'To download only the videos in the home page, add a "/home" to the URL')
+                'To download only the videos in the home page, add a "/featured" to the URL')
            url = '%s/videos%s' % (is_home.group('pre'), is_home.group('post') or '')

        # Handle both video/playlist URLs
@@ -3464,6 +3464,7 @@ class YoutubePlaylistIE(InfoExtractor):


class YoutubeYtBeIE(InfoExtractor):
    IE_DESC = 'youtu.be'
    _VALID_URL = r'https?://youtu\.be/(?P<id>[0-9A-Za-z_-]{11})/*?.*?\blist=(?P<playlist_id>%(playlist_id)s)' % {'playlist_id': YoutubeBaseInfoExtractor._PLAYLIST_ID_RE}
    _TESTS = [{
        'url': 'https://youtu.be/yeWKywCrFtk?list=PL2qgrgXsNUG5ig9cat4ohreBjYLAPC0J5',
@@ -3503,6 +3504,7 @@ class YoutubeYtBeIE(InfoExtractor):


class YoutubeYtUserIE(InfoExtractor):
    IE_DESC = 'YouTube.com user videos, URL or "ytuser" keyword'
    _VALID_URL = r'ytuser:(?P<id>.+)'
    _TESTS = [{
        'url': 'ytuser:phihag',
@@ -3647,12 +3649,12 @@ class YoutubeSearchIE(SearchInfoExtractor, YoutubeBaseInfoExtractor):
class YoutubeSearchDateIE(YoutubeSearchIE):
    IE_NAME = YoutubeSearchIE.IE_NAME + ':date'
    _SEARCH_KEY = 'ytsearchdate'
-    IE_DESC = 'YouTube.com searches, newest videos first'
+    IE_DESC = 'YouTube.com searches, newest videos first, "ytsearchdate" keyword'
    _SEARCH_PARAMS = 'CAI%3D'


class YoutubeSearchURLIE(YoutubeSearchIE):
-    IE_DESC = 'YouTube.com search URLs'
+    IE_DESC = 'YouTube.com searches, "ytsearch" keyword'
    IE_NAME = YoutubeSearchIE.IE_NAME + '_url'
    _VALID_URL = r'https?://(?:www\.)?youtube\.com/results\?(.*?&)?(?:search_query|q)=(?:[^&]+)(?:[&]|$)'
    # _MAX_RESULTS = 100

@@ -140,7 +140,7 @@ def parseOpts(overrideArguments=None):
    general.add_option(
        '-U', '--update',
        action='store_true', dest='update_self',
-        help='Update this program to latest version. Make sure that you have sufficient permissions (run with sudo if needed)')
+        help='[BROKEN] Update this program to latest version. Make sure that you have sufficient permissions (run with sudo if needed)')
    general.add_option(
        '-i', '--ignore-errors', '--no-abort-on-error',
        action='store_true', dest='ignoreerrors', default=True,
@@ -300,15 +300,22 @@ def parseOpts(overrideArguments=None):
    selection.add_option(
        '--date',
        metavar='DATE', dest='date', default=None,
-        help='Download only videos uploaded in this date')
+        help=(
+            'Download only videos uploaded in this date.'
+            'The date can be "YYYYMMDD" or in the format'
+            '"(now|today)[+-][0-9](day|week|month|year)(s)?"'))
    selection.add_option(
        '--datebefore',
        metavar='DATE', dest='datebefore', default=None,
-        help='Download only videos uploaded on or before this date (i.e. inclusive)')
+        help=(
+            'Download only videos uploaded on or before this date. '
+            'The date formats accepted is the same as --date'))
    selection.add_option(
        '--dateafter',
        metavar='DATE', dest='dateafter', default=None,
-        help='Download only videos uploaded on or after this date (i.e. inclusive)')
+        help=(
+            'Download only videos uploaded on or after this date. '
+            'The date formats accepted is the same as --date'))
    selection.add_option(
        '--min-views',
        metavar='COUNT', dest='min_views', default=None, type=int,
@@ -420,7 +427,7 @@ def parseOpts(overrideArguments=None):
        action='store', dest='format', metavar='FORMAT', default=None,
        help='Video format code, see "FORMAT SELECTION" for more details')
    video_format.add_option(
-        '-S', '--format-sort',
+        '-S', '--format-sort', metavar='SORTORDER',
        dest='format_sort', default=[],
        action='callback', callback=_comma_separated_values_options_callback, type='str',
        help='Sort the formats by the fields given, see "Sorting Formats" for more details')
@@ -545,13 +552,13 @@ def parseOpts(overrideArguments=None):
        dest='fragment_retries', metavar='RETRIES', default=10,
        help='Number of retries for a fragment (default is %default), or "infinite" (DASH, hlsnative and ISM)')
    downloader.add_option(
-        '--skip-unavailable-fragments','--no-abort-on-unavailable-fragment',
+        '--skip-unavailable-fragments', '--no-abort-on-unavailable-fragment',
        action='store_true', dest='skip_unavailable_fragments', default=True,
        help='Skip unavailable fragments for DASH, hlsnative and ISM (default)')
    downloader.add_option(
        '--abort-on-unavailable-fragment', '--no-skip-unavailable-fragments',
        action='store_false', dest='skip_unavailable_fragments',
-        help='Abort downloading when some fragment is not available')
+        help='Abort downloading when some fragment is unavailable')
    downloader.add_option(
        '--keep-fragments',
        action='store_true', dest='keep_fragments', default=False,
@@ -588,7 +595,7 @@ def parseOpts(overrideArguments=None):
        help='Download playlist videos in reverse order')
    downloader.add_option(
        '--no-playlist-reverse',
-        action='store_false', dest='playlist_reverse',
+        action='store_false', dest='playlist_reverse',
        help='Download playlist videos in default order (default)')
    downloader.add_option(
        '--playlist-random',
@@ -617,7 +624,7 @@ def parseOpts(overrideArguments=None):
        dest='external_downloader', metavar='COMMAND',
        help=(
            'Use the specified external downloader. '
-            'Currently supports %s' % ','.join(list_external_downloaders()) ))
+            'Currently supports %s' % ','.join(list_external_downloaders())))
    downloader.add_option(
        '--external-downloader-args',
        dest='external_downloader_args', metavar='ARGS',
@@ -670,7 +677,7 @@ def parseOpts(overrideArguments=None):
            '(maximum possible number of seconds to sleep). Must only be used '
            'along with --min-sleep-interval.'))
    workarounds.add_option(
-        '--sleep-subtitles',
+        '--sleep-subtitles', metavar='SECONDS',
        dest='sleep_interval_subtitles', default=0, type=int,
        help='Enforce sleep interval on subtitles as well')

@@ -731,14 +738,14 @@ def parseOpts(overrideArguments=None):
        '-J', '--dump-single-json',
        action='store_true', dest='dump_single_json', default=False,
        help=(
-            'Simulate, quiet but print JSON information for each command-line argument.'
+            'Simulate, quiet but print JSON information for each command-line argument. '
            'If the URL refers to a playlist, dump the whole playlist information in a single line.'))
    verbosity.add_option(
        '--print-json',
        action='store_true', dest='print_json', default=False,
        help='Be quiet and print the video information as JSON (video is still being downloaded).')
    verbosity.add_option(
-        '--force-write-download-archive', '--force-write-archive', '--force-download-archive',
+        '--force-write-archive', '--force-write-download-archive', '--force-download-archive',
        action='store_true', dest='force_write_download_archive', default=False,
        help=(
            'Force download archive entries to be written as far as no errors occur,'
@@ -900,7 +907,8 @@ def parseOpts(overrideArguments=None):
        action='store_true', dest='rm_cachedir',
        help='Delete all filesystem cache files')
    filesystem.add_option(
-        '--trim-file-name', dest='trim_file_name', default=0, type=int,
+        '--trim-file-name', metavar='LENGTH',
+        dest='trim_file_name', default=0, type=int,
        help='Limit the filename length (extension excluded)')

    thumbnail = optparse.OptionGroup(parser, 'Thumbnail Images')
@@ -955,7 +963,7 @@ def parseOpts(overrideArguments=None):
        '--remux-video',
        metavar='FORMAT', dest='remuxvideo', default=None,
        help=(
-            'Remux the video into another container if necessary (currently supported: mp4|mkv). '
+            'Remux the video into another container if necessary (currently supported: mp4|mkv). '
            'If target container does not support the video/audio codec, remuxing will fail'))
    postproc.add_option(
        '--recode-video',
@@ -1048,39 +1056,39 @@ def parseOpts(overrideArguments=None):
        metavar='FORMAT', dest='convertsubtitles', default=None,
        help='Convert the subtitles to other format (currently supported: srt|ass|vtt|lrc)')

-    extractor = optparse.OptionGroup(parser, 'SponSkrub Options (SponsorBlock)')
-    extractor.add_option(
+    sponskrub = optparse.OptionGroup(parser, 'SponSkrub Options (SponsorBlock)')
+    sponskrub.add_option(
        '--sponskrub',
        action='store_true', dest='sponskrub', default=None,
        help=(
            'Use sponskrub to mark sponsored sections with the data available in SponsorBlock API. '
            'This is enabled by default if the sponskrub binary exists (Youtube only)'))
-    extractor.add_option(
+    sponskrub.add_option(
        '--no-sponskrub',
        action='store_false', dest='sponskrub',
        help='Do not use sponskrub')
-    extractor.add_option(
+    sponskrub.add_option(
        '--sponskrub-cut', default=False,
        action='store_true', dest='sponskrub_cut',
        help='Cut out the sponsor sections instead of simply marking them')
-    extractor.add_option(
+    sponskrub.add_option(
        '--no-sponskrub-cut',
        action='store_false', dest='sponskrub_cut',
        help='Simply mark the sponsor sections, not cut them out (default)')
-    extractor.add_option(
+    sponskrub.add_option(
        '--sponskrub-force', default=False,
        action='store_true', dest='sponskrub_force',
        help='Run sponskrub even if the video was already downloaded')
-    extractor.add_option(
+    sponskrub.add_option(
        '--no-sponskrub-force',
        action='store_true', dest='sponskrub_force',
        help='Do not cut out the sponsor sections if the video was already downloaded (default)')
-    extractor.add_option(
+    sponskrub.add_option(
        '--sponskrub-location', metavar='PATH',
        dest='sponskrub_path', default='',
        help='Location of the sponskrub binary; either the path to the binary or its containing directory.')
-    extractor.add_option(
-        '--sponskrub-args', dest='sponskrub_args',
+    sponskrub.add_option(
+        '--sponskrub-args', dest='sponskrub_args', metavar='ARGS',
        help='Give these arguments to sponskrub')

    extractor = optparse.OptionGroup(parser, 'Extractor Options')
@@ -1108,6 +1116,7 @@ def parseOpts(overrideArguments=None):
    parser.add_option_group(authentication)
    parser.add_option_group(adobe_pass)
    parser.add_option_group(postproc)
+    parser.add_option_group(sponskrub)
    parser.add_option_group(extractor)

    if overrideArguments is not None:

@@ -76,7 +76,7 @@ class EmbedThumbnailPP(FFmpegPostProcessor):

        if info['ext'] == 'mp3':
            options = [
-                '-c', 'copy', '-map', '0:0', '-map', '1:0', '-id3v2_version', '3',
+                '-c', 'copy', '-map', '0:0', '-map', '1:0', '-id3v2_version', '3',
                '-metadata:s:v', 'title="Album cover"', '-metadata:s:v', 'comment="Cover (front)"']

            self._downloader.to_screen('[ffmpeg] Adding thumbnail to "%s"' % filename)

@@ -2323,8 +2323,8 @@ def bug_reports_message():
    if ytdl_is_updateable():
        update_cmd = 'type youtube-dlc -U to update'
    else:
-        update_cmd = 'see https://github.com/blackjack4494/yt-dlc on how to update'
-    msg = '; please report this issue on https://github.com/blackjack4494/yt-dlc .'
+        update_cmd = 'see https://github.com/pukkandan/yt-dlc on how to update'
+    msg = '; please report this issue on https://github.com/pukkandan/yt-dlc .'
    msg += ' Make sure you are using the latest version; %s.' % update_cmd
    msg += ' Be sure to call youtube-dlc with the --verbose flag and include its complete output.'
    return msg
@@ -5734,6 +5734,7 @@ def random_birthday(year_field, month_field, day_field):
        day_field: str(random_date.day),
    }


# Templates for internet shortcut files, which are plain text files.
DOT_URL_LINK_TEMPLATE = '''
[InternetShortcut]
@@ -5812,6 +5813,7 @@ def to_high_limit_path(path):

    return path


def format_field(obj, field, template='%s', ignore=(None, ''), default='', func=None):
    val = obj.get(field, default)
    if func and val not in ignore:

@@ -1,3 +1,3 @@
from __future__ import unicode_literals

-__version__ = '2020.11.11-2'
+__version__ = '2021.01.05-2'