from __future__ import unicode_literals

import re

from .common import FileDownloader
from ..utils import sanitized_Request


class DashSegmentsFD(FileDownloader):
    """
    Download segments in a DASH manifest.

    info_dict must provide 'url' (the base URL), 'segment_urls',
    'initialization_url' and 'id'.
    """
    def real_download(self, filename, info_dict):
        self.report_destination(filename)
        tmpfilename = self.temp_name(filename)
        base_url = info_dict['url']
        segment_urls = info_dict['segment_urls']

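        # In test mode ('test' param) cap the total download at self._TEST_FILE_SIZE bytes.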
        is_test = self.params.get('test', False)
        remaining_bytes = self._TEST_FILE_SIZE if is_test else None
        byte_counter = 0

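        # Download target_url and append the response body to outf; returns the
        # number of bytes written so the caller can track progress.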
        def append_url_to_file(outf, target_url, target_name, remaining_bytes=None):
            self.to_screen('[DashSegments] %s: Downloading %s' % (info_dict['id'], target_name))
            req = sanitized_Request(target_url)
            if remaining_bytes is not None:
                # Ask the server for no more than the remaining byte budget.
                req.add_header('Range', 'bytes=0-%d' % (remaining_bytes - 1))

            data = self.ydl.urlopen(req).read()

            if remaining_bytes is not None:
                # Truncate in case the server ignored the Range header.
                data = data[:remaining_bytes]

            outf.write(data)
            return len(data)

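        # Resolve a segment URL that may be either absolute or relative to the
        # manifest base URL.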
        def combine_url(base_url, target_url):
            if re.match(r'^https?://', target_url):
                return target_url
            return '%s%s%s' % (base_url, '' if base_url.endswith('/') else '/', target_url)

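        # Write the initialization segment first, then every media segment in
        # manifest order, into a single temporary file.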
        with open(tmpfilename, 'wb') as outf:
            append_url_to_file(
                outf, combine_url(base_url, info_dict['initialization_url']),
                'initialization segment')
            for i, segment_url in enumerate(segment_urls):
                segment_len = append_url_to_file(
                    outf, combine_url(base_url, segment_url),
                    'segment %d / %d' % (i + 1, len(segment_urls)),
                    remaining_bytes)
                byte_counter += segment_len
                if remaining_bytes is not None:
                    remaining_bytes -= segment_len
                    # Stop early once the test-mode byte budget is used up.
                    if remaining_bytes <= 0:
                        break

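        # Move the completed temporary file into its final place.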
        self.try_rename(tmpfilename, filename)

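        # Report the finished download to any registered progress hooks.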
        self._hook_progress({
            'downloaded_bytes': byte_counter,
            'total_bytes': byte_counter,
            'filename': filename,
            'status': 'finished',
        })

        return True