[downloader/dash] Abort if the first segment fails
Closes #10497, Closes #10542
parent b29cd56591
commit 919cf1a62f
2 changed files with 17 additions and 7 deletions
ChangeLog
@@ -1,5 +1,9 @@
 version <unreleased>
 
+Core
+* If the first segment of DASH fails, abort the whole download process to
+  prevent throttling (#10497)
+
 Extractors
 * [pornvoisines] Fix extraction (#10469)
 * [rottentomatoes] Fix extraction (#10467)
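The ChangeLog entry above is driven by two downloader options that the code below reads from self.params: fragment_retries and skip_unavailable_fragments. As a rough usage sketch (the URL and option values are placeholders, not part of this commit), they can be set through the embedded API:

    import youtube_dl

    ydl_opts = {
        'fragment_retries': 10,              # retry each failed DASH segment up to 10 times
        'skip_unavailable_fragments': True,  # after the retries, skip a failed segment;
                                             # a failed first segment is now always fatal
    }
    with youtube_dl.YoutubeDL(ydl_opts) as ydl:
        ydl.download(['https://example.com/some-dash-video'])  # placeholder URL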
youtube_dl/downloader/dash.py
@@ -40,7 +40,8 @@ class DashSegmentsFD(FragmentFD):
         fragment_retries = self.params.get('fragment_retries', 0)
         skip_unavailable_fragments = self.params.get('skip_unavailable_fragments', True)
 
-        def append_url_to_file(target_url, tmp_filename, segment_name):
+        def process_segment(segment, tmp_filename, fatal):
+            target_url, segment_name = segment
             target_filename = '%s-%s' % (tmp_filename, segment_name)
             count = 0
             while count <= fragment_retries:
@@ -64,18 +65,23 @@ class DashSegmentsFD(FragmentFD):
                     if count <= fragment_retries:
                         self.report_retry_fragment(err, segment_name, count, fragment_retries)
             if count > fragment_retries:
-                if skip_unavailable_fragments:
+                if not fatal:
                     self.report_skip_fragment(segment_name)
                     return True
                 self.report_error('giving up after %s fragment retries' % fragment_retries)
                 return False
             return True
 
-        if initialization_url:
-            if not append_url_to_file(initialization_url, ctx['tmpfilename'], 'Init'):
-                return False
-        for i, segment_url in enumerate(segment_urls):
-            if not append_url_to_file(segment_url, ctx['tmpfilename'], 'Seg%d' % i):
+        segments_to_download = [(initialization_url, 'Init')] if initialization_url else []
+        segments_to_download.extend([
+            (segment_url, 'Seg%d' % i)
+            for i, segment_url in enumerate(segment_urls)])
+
+        for i, segment in enumerate(segments_to_download):
+            # In DASH, the first segment contains necessary headers to
+            # generate a valid MP4 file, so always abort for the first segment
+            fatal = i == 0 or not skip_unavailable_fragments
+            if not process_segment(segment, ctx['tmpfilename'], fatal):
                 return False
 
         self._finish_frag_download(ctx)
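To summarize the new control flow without the surrounding downloader machinery, here is a simplified standalone sketch; download_one is a hypothetical stand-in for the retry loop inside process_segment and is not part of the actual code:

    def download_all(segments, skip_unavailable_fragments, download_one):
        # segments is a list of (url, name) tuples, e.g. ('...', 'Init'), ('...', 'Seg0'), ...
        for i, (url, name) in enumerate(segments):
            # The first segment carries the headers needed to build a valid MP4,
            # so a failure there aborts the whole download regardless of options.
            fatal = i == 0 or not skip_unavailable_fragments
            if download_one(url, name):
                continue
            if fatal:
                return False  # abort instead of continuing with a broken, throttled download
            # non-fatal failure: skip this segment and move on
        return True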