Commit
Merge branch 'fix-bilibili-redirect' of https://github.com/rosynirvana/you-get into rosynirvana-fix-bilibili-redirect
soimort committed Jul 7, 2017
2 parents d0a7655 + 9ca5bf1 commit 50d34d2
Showing 2 changed files with 314 additions and 192 deletions.
25 changes: 21 additions & 4 deletions src/you_get/extractor.py
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 
-from .common import match1, maybe_print, download_urls, get_filename, parse_host, set_proxy, unset_proxy
+from .common import match1, maybe_print, download_urls, get_filename, parse_host, set_proxy, unset_proxy, get_content, dry_run
 from .common import print_more_compatible as print
 from .util import log
 from . import json_output
@@ -28,6 +28,10 @@ def __init__(self, *args):
         self.password_protected = False
         self.dash_streams = {}
         self.caption_tracks = {}
+        self.out = False
+        self.ua = None
+        self.referer = None
+        self.danmuku = None
 
         if args:
             self.url = args[0]
@@ -39,6 +43,8 @@ def download_by_url(self, url, **kwargs):
         if 'extractor_proxy' in kwargs and kwargs['extractor_proxy']:
             set_proxy(parse_host(kwargs['extractor_proxy']))
         self.prepare(**kwargs)
+        if self.out:
+            return
         if 'extractor_proxy' in kwargs and kwargs['extractor_proxy']:
             unset_proxy()
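
The new self.out flag gives prepare() a way to short-circuit download_by_url(): an extractor that has already handed the URL off elsewhere (presumably how the bilibili redirect fix in this merge uses it) sets the flag, and the caller returns right after prepare() instead of downloading anything itself. A minimal sketch, assuming a hypothetical subclass; the redirect check and the delegation are illustrative, not part of this commit:

from you_get.extractor import VideoExtractor

class ExampleRedirectExtractor(VideoExtractor):  # hypothetical subclass
    name = 'example'

    def prepare(self, **kwargs):
        # Suppose the page turns out to live on another site: delegate
        # the URL (e.g. to the matching extractor) and set self.out so
        # download_by_url() returns without downloading anything here.
        if self.url and self.url.startswith('https://example.com/out'):  # illustrative check
            # ... delegate to the appropriate extractor here ...
            self.out = True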

@@ -99,7 +105,8 @@ def p_stream(self, stream_id):
         print(" quality: %s" % stream['quality'])
 
         if 'size' in stream and stream['container'].lower() != 'm3u8':
-            print(" size: %s MiB (%s bytes)" % (round(stream['size'] / 1048576, 1), stream['size']))
+            if stream['size'] != float('inf') and stream['size'] != 0:
+                print(" size: %s MiB (%s bytes)" % (round(stream['size'] / 1048576, 1), stream['size']))
 
         if 'itag' in stream:
             print(" # download-with: %s" % log.sprint("you-get --itag=%s [URL]" % stream_id, log.UNDERLINE))
@@ -202,12 +209,17 @@ def download(self, **kwargs):
             if not urls:
                 log.wtf('[Failed] Cannot extract video source.')
             # For legacy main()
-            download_urls(urls, self.title, ext, total_size,
+            headers = {}
+            if self.ua is not None:
+                headers['User-Agent'] = self.ua
+            if self.referer is not None:
+                headers['Referer'] = self.referer
+            download_urls(urls, self.title, ext, total_size, headers=headers,
                           output_dir=kwargs['output_dir'],
                           merge=kwargs['merge'],
                           av=stream_id in self.dash_streams)
         if 'caption' not in kwargs or not kwargs['caption']:
-            print('Skipping captions.')
+            print('Skipping captions or danmuku.')
             return
         for lang in self.caption_tracks:
             filename = '%s.%s.srt' % (get_filename(self.title), lang)
@@ -217,6 +229,11 @@ def download(self, **kwargs):
                       'w', encoding='utf-8') as x:
                 x.write(srt)
             print('Done.')
+        if self.danmuku is not None and not dry_run:
+            filename = '{}.cmt.xml'.format(get_filename(self.title))
+            print('Downloading {} ...\n'.format(filename))
+            with open(os.path.join(kwargs['output_dir'], filename), 'w', encoding='utf8') as fp:
+                fp.write(self.danmuku)
 
         # For main_dev()
         #download_urls(urls, self.title, self.streams[stream_id]['container'], self.streams[stream_id]['size'])
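
Taken together, the new attributes let a site extractor attach per-request HTTP headers and a danmaku (comment) track without touching download() itself: prepare() sets self.ua and self.referer, which download() now forwards to download_urls() as User-Agent and Referer, and self.danmuku, which download() writes out as <title>.cmt.xml unless captions are skipped or this is a dry run. A minimal sketch, assuming a hypothetical extractor; all example.com URLs are illustrative:

from you_get.extractor import VideoExtractor
from you_get.common import get_content

class ExampleExtractor(VideoExtractor):  # hypothetical subclass
    name = 'example'

    def prepare(self, **kwargs):
        self.title = 'clip'
        # Forwarded by the new download() code as HTTP headers:
        self.ua = 'Mozilla/5.0'                     # sent as User-Agent
        self.referer = 'https://www.bilibili.com/'  # sent as Referer
        # Raw comment XML; download() writes it to 'clip.cmt.xml'.
        self.danmuku = get_content('https://example.com/danmaku.xml')  # hypothetical URL
        self.streams['flv'] = {
            'container': 'flv',
            'src': ['https://example.com/video.flv'],  # hypothetical URL
            'size': 0,
        }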