Merge branch 'master' of github.com:rg3/youtube-dl

commit c93b4eaceb

9 changed files with 206 additions and 12 deletions
@@ -13,6 +13,7 @@ import os.path
 
 sys.path.insert(0, os.path.dirname(os.path.dirname((os.path.abspath(__file__)))))
 from youtube_dl.compat import (
+    compat_input,
     compat_http_server,
     compat_str,
     compat_urlparse,
@@ -30,11 +31,6 @@ try:
 except ImportError:  # Python 2
     import SocketServer as compat_socketserver
 
-try:
-    compat_input = raw_input
-except NameError:  # Python 3
-    compat_input = input
-
 
 class BuildHTTPServer(compat_socketserver.ThreadingMixIn, compat_http_server.HTTPServer):
     allow_reuse_address = True
devscripts/create-github-release.py (new file, 112 lines)

@@ -0,0 +1,112 @@
+#!/usr/bin/env python
+from __future__ import unicode_literals
+
+import base64
+import json
+import mimetypes
+import netrc
+import optparse
+import os
+import sys
+
+sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+
+from youtube_dl.compat import (
+    compat_basestring,
+    compat_input,
+    compat_getpass,
+    compat_print,
+    compat_urllib_request,
+)
+from youtube_dl.utils import (
+    make_HTTPS_handler,
+    sanitized_Request,
+)
+
+
+class GitHubReleaser(object):
+    _API_URL = 'https://api.github.com/repos/rg3/youtube-dl/releases'
+    _UPLOADS_URL = 'https://uploads.github.com/repos/rg3/youtube-dl/releases/%s/assets?name=%s'
+    _NETRC_MACHINE = 'github.com'
+
+    def __init__(self, debuglevel=0):
+        self._init_github_account()
+        https_handler = make_HTTPS_handler({}, debuglevel=debuglevel)
+        self._opener = compat_urllib_request.build_opener(https_handler)
+
+    def _init_github_account(self):
+        try:
+            info = netrc.netrc().authenticators(self._NETRC_MACHINE)
+            if info is not None:
+                self._username = info[0]
+                self._password = info[2]
+                compat_print('Using GitHub credentials found in .netrc...')
+                return
+            else:
+                compat_print('No GitHub credentials found in .netrc')
+        except (IOError, netrc.NetrcParseError):
+            compat_print('Unable to parse .netrc')
+        self._username = compat_input(
+            'Type your GitHub username or email address and press [Return]: ')
+        self._password = compat_getpass(
+            'Type your GitHub password and press [Return]: ')
+
+    def _call(self, req):
+        if isinstance(req, compat_basestring):
+            req = sanitized_Request(req)
+        # Authorizing manually since GitHub does not response with 401 with
+        # WWW-Authenticate header set (see
+        # https://developer.github.com/v3/#basic-authentication)
+        b64 = base64.b64encode(
+            ('%s:%s' % (self._username, self._password)).encode('utf-8')).decode('ascii')
+        req.add_header('Authorization', 'Basic %s' % b64)
+        response = self._opener.open(req).read().decode('utf-8')
+        return json.loads(response)
+
+    def list_releases(self):
+        return self._call(self._API_URL)
+
+    def create_release(self, tag_name, name=None, body='', draft=False, prerelease=False):
+        data = {
+            'tag_name': tag_name,
+            'target_commitish': 'master',
+            'name': name,
+            'body': body,
+            'draft': draft,
+            'prerelease': prerelease,
+        }
+        req = sanitized_Request(self._API_URL, json.dumps(data).encode('utf-8'))
+        return self._call(req)
+
+    def create_asset(self, release_id, asset):
+        asset_name = os.path.basename(asset)
+        url = self._UPLOADS_URL % (release_id, asset_name)
+        # Our files are small enough to be loaded directly into memory.
+        data = open(asset, 'rb').read()
+        req = sanitized_Request(url, data)
+        mime_type, _ = mimetypes.guess_type(asset_name)
+        req.add_header('Content-Type', mime_type or 'application/octet-stream')
+        return self._call(req)
+
+
+def main():
+    parser = optparse.OptionParser(usage='%prog VERSION BUILDPATH')
+    options, args = parser.parse_args()
+    if len(args) != 2:
+        parser.error('Expected a version and a build directory')
+
+    version, build_path = args
+
+    releaser = GitHubReleaser(debuglevel=0)
+
+    new_release = releaser.create_release(
+        version, name='youtube-dl %s' % version, draft=True, prerelease=True)
+    release_id = new_release['id']
+
+    for asset in os.listdir(build_path):
+        compat_print('Uploading %s...' % asset)
+        releaser.create_asset(release_id, os.path.join(build_path, asset))
+
+
+if __name__ == '__main__':
+    main()
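Note on credentials: as _init_github_account above shows, the script first looks for a github.com entry in .netrc and only falls back to interactive prompts. A minimal standalone sketch of that lookup, assuming a conventional ~/.netrc (the file location and sample entry are illustrative, not part of the commit):

# Illustrative only: the .netrc lookup the new script relies on.
# Only the machine name 'github.com' comes from the script above;
# the sample entry and credentials are assumptions for the example.
import netrc

# A matching ~/.netrc entry would look like:
#   machine github.com
#   login your-github-username
#   password your-token-or-password
info = netrc.netrc().authenticators('github.com')
if info is not None:
    username, _, password = info
    print('Found credentials for %s' % username)
else:
    print('No github.com entry in .netrc; the script would prompt instead')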
@@ -95,17 +95,16 @@ RELEASE_FILES="youtube-dl youtube-dl.exe youtube-dl-$version.tar.gz"
 (cd build/$version/ && sha256sum $RELEASE_FILES > SHA2-256SUMS)
 (cd build/$version/ && sha512sum $RELEASE_FILES > SHA2-512SUMS)
 
-/bin/echo -e "\n### Signing and uploading the new binaries to yt-dl.org ..."
+/bin/echo -e "\n### Signing and uploading the new binaries to GitHub..."
 for f in $RELEASE_FILES; do gpg --passphrase-repeat 5 --detach-sig "build/$version/$f"; done
 
-echo 'TODO: upload on GitHub'
-exit 1
+ROOT=$(pwd)
+python devscripts/create-github-release.py $version "$ROOT/build/$version"
 
 ssh ytdl@yt-dl.org "sh html/update_latest.sh $version"
 
 /bin/echo -e "\n### Now switching to gh-pages..."
 git clone --branch gh-pages --single-branch . build/gh-pages
-ROOT=$(pwd)
 (
     set -e
     ORIGIN_URL=$(git config --get remote.origin.url)
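The release script still writes SHA2-256SUMS and SHA2-512SUMS with sha256sum/sha512sum before signing and uploading. A small hypothetical helper, not part of this commit, that re-checks the SHA-256 sums with plain Python in case someone wants to verify a downloaded build (file names and paths are assumptions):

# Recompute SHA-256 digests and compare them against a SHA2-256SUMS file
# in sha256sum's "digest  filename" format. Paths below are examples only.
import hashlib
import os


def verify_sha256sums(sums_path):
    base_dir = os.path.dirname(sums_path)
    with open(sums_path) as f:
        for line in f:
            expected, name = line.split(None, 1)
            name = name.strip().lstrip('*')
            with open(os.path.join(base_dir, name), 'rb') as asset:
                actual = hashlib.sha256(asset.read()).hexdigest()
            print('%s: %s' % (name, 'OK' if actual == expected else 'MISMATCH'))


# verify_sha256sums('build/<version>/SHA2-256SUMS')  # example invocation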
@@ -482,6 +482,11 @@ if sys.version_info < (3, 0) and sys.platform == 'win32':
 else:
     compat_getpass = getpass.getpass
 
+try:
+    compat_input = raw_input
+except NameError:  # Python 3
+    compat_input = input
+
 # Python < 2.6.5 require kwargs to be bytes
 try:
     def _testfunc(x):
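The new compat_input uses the usual try/except shim: Python 2 has raw_input, while on Python 3 the name lookup raises NameError and the built-in input is used instead. A self-contained sketch of the same idiom (the prompt text is only an example):

# Standalone illustration of the shim added above: on Python 2 the built-in
# raw_input exists and never eval()s its input; on Python 3 looking it up
# raises NameError and the (equally safe) built-in input is used instead.
try:
    compat_input = raw_input
except NameError:  # Python 3
    compat_input = input

if __name__ == '__main__':
    answer = compat_input('Continue? [y/N] ')  # prompt text is illustrative
    print('You typed: %s' % answer)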
@@ -23,11 +23,17 @@ class HlsFD(FragmentFD):
     UNSUPPORTED_FEATURES = (
         r'#EXT-X-KEY:METHOD=(?!NONE)',  # encrypted streams [1]
         r'#EXT-X-BYTERANGE',  # playlists composed of byte ranges of media files [2]
         # Live streams heuristic does not always work (e.g. geo restricted to Germany
         # http://hls-geo.daserste.de/i/videoportal/Film/c_620000/622873/format,716451,716457,716450,716458,716459,.mp4.csmil/index_4_av.m3u8?null=0)
         # r'#EXT-X-MEDIA-SEQUENCE:(?!0$)',  # live streams [3]
-        r'#EXT-X-PLAYLIST-TYPE:EVENT',  # media segments may be appended to the end of
-                                        # event media playlists [4]
+        # This heuristic also is not correct since segments may not be appended as well.
+        # Twitch vods of finished streams have EXT-X-PLAYLIST-TYPE:EVENT despite
+        # no segments will definitely be appended to the end of the playlist.
+        # r'#EXT-X-PLAYLIST-TYPE:EVENT',  # media segments may be appended to the end of
+        # #                               event media playlists [4]
         # 1. https://tools.ietf.org/html/draft-pantos-http-live-streaming-17#section-4.3.2.4
         # 2. https://tools.ietf.org/html/draft-pantos-http-live-streaming-17#section-4.3.2.2
         # 3. https://tools.ietf.org/html/draft-pantos-http-live-streaming-17#section-4.3.3.2
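UNSUPPORTED_FEATURES itself is just a tuple of regexes; the downloader applies it to a manifest to decide whether the native HLS code can handle it. Roughly, as in this simplified sketch (the function name and toy manifest are assumptions, not the exact youtube-dl code):

# Rough sketch of applying such a feature blacklist to an HLS manifest.
import re

UNSUPPORTED_FEATURES = (
    r'#EXT-X-KEY:METHOD=(?!NONE)',  # encrypted streams
    r'#EXT-X-BYTERANGE',            # byte-range playlists
)


def can_download_natively(manifest):
    # The manifest is supported only if none of the blacklisted tags match.
    return all(not re.search(feature, manifest) for feature in UNSUPPORTED_FEATURES)


sample = '#EXTM3U\n#EXT-X-KEY:METHOD=AES-128,URI="key"\n#EXTINF:10,\nseg0.ts\n'
print(can_download_natively(sample))  # False: the stream is encrypted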
@@ -910,6 +910,7 @@ from .videomore import (
 )
 from .videopremium import VideoPremiumIE
 from .videott import VideoTtIE
+from .vidio import VidioIE
 from .vidme import (
     VidmeIE,
     VidmeUserIE,
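Adding the import is what registers VidioIE with the extractor list that youtube-dl matches URLs against. A toy illustration of that kind of _VALID_URL-based dispatch (the classes and helper below are simplified stand-ins, not youtube-dl internals):

# Simplified stand-in for extractor dispatch by _VALID_URL.
import re


class FakeVidioIE(object):
    _VALID_URL = r'https?://(?:www\.)?vidio\.com/watch/(?P<id>\d+)-(?P<display_id>[^/?#&]+)'


class FakeGenericIE(object):
    _VALID_URL = r'.*'  # catch-all fallback


def pick_extractor(url, extractors):
    for ie in extractors:
        if re.match(ie._VALID_URL, url):
            return ie.__name__
    return None


print(pick_extractor(
    'https://www.vidio.com/watch/77949-south-korea-test-fires-missile-that-can-strike-all-of-the-north',
    [FakeVidioIE, FakeGenericIE]))  # FakeVidioIE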
@@ -260,7 +260,7 @@ class TwitchVodIE(TwitchItemBaseIE):
                     'nauth': access_token['token'],
                     'nauthsig': access_token['sig'],
                 })),
-            item_id, 'mp4')
+            item_id, 'mp4', entry_protocol='m3u8_native')
 
         self._prefer_source(formats)
         info['formats'] = formats
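The only change here is entry_protocol='m3u8_native', which labels the Twitch VOD formats for youtube-dl's built-in HLS downloader rather than handing the m3u8 to ffmpeg. A toy sketch of why that protocol label matters downstream (the dispatcher and format dicts are simplified stand-ins, not youtube-dl internals):

# Downstream, the downloader is chosen per-format from its protocol field.
def pick_downloader(fmt):
    protocol = fmt.get('protocol')
    if protocol == 'm3u8_native':
        return 'native HLS downloader (fragment-based)'
    if protocol == 'm3u8':
        return 'external ffmpeg downloader'
    return 'plain HTTP downloader'


before = {'format_id': 'source', 'protocol': 'm3u8'}
after = {'format_id': 'source', 'protocol': 'm3u8_native'}
print(pick_downloader(before))  # external ffmpeg downloader
print(pick_downloader(after))   # native HLS downloader (fragment-based)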
youtube_dl/extractor/vidio.py (new file, 73 lines)

@@ -0,0 +1,73 @@
+# coding: utf-8
+from __future__ import unicode_literals
+
+import re
+
+from .common import InfoExtractor
+from ..utils import int_or_none
+
+
+class VidioIE(InfoExtractor):
+    _VALID_URL = r'https?://(?:www\.)?vidio\.com/watch/(?P<id>\d+)-(?P<display_id>[^/?#&]+)'
+    _TESTS = [{
+        'url': 'http://www.vidio.com/watch/165683-dj_ambred-booyah-live-2015',
+        'md5': 'cd2801394afc164e9775db6a140b91fe',
+        'info_dict': {
+            'id': '165683',
+            'display_id': 'dj_ambred-booyah-live-2015',
+            'ext': 'mp4',
+            'title': 'DJ_AMBRED - Booyah (Live 2015)',
+            'description': 'md5:27dc15f819b6a78a626490881adbadf8',
+            'thumbnail': 're:^https?://.*\.jpg$',
+            'duration': 149,
+            'like_count': int,
+        },
+    }, {
+        'url': 'https://www.vidio.com/watch/77949-south-korea-test-fires-missile-that-can-strike-all-of-the-north',
+        'only_matching': True,
+    }]
+
+    def _real_extract(self, url):
+        mobj = re.match(self._VALID_URL, url)
+        video_id, display_id = mobj.group('id', 'display_id')
+
+        webpage = self._download_webpage(url, display_id)
+
+        title = self._og_search_title(webpage)
+
+        m3u8_url, duration, thumbnail = [None] * 3
+
+        clips = self._parse_json(
+            self._html_search_regex(
+                r'data-json-clips\s*=\s*(["\'])(?P<data>\[.+?\])\1',
+                webpage, 'video data', default='[]', group='data'),
+            display_id, fatal=False)
+        if clips:
+            clip = clips[0]
+            m3u8_url = clip.get('sources', [{}])[0].get('file')
+            duration = clip.get('clip_duration')
+            thumbnail = clip.get('image')
+
+        m3u8_url = m3u8_url or self._search_regex(
+            r'data(?:-vjs)?-clip-hls-url=(["\'])(?P<url>.+?)\1', webpage, 'hls url')
+        formats = self._extract_m3u8_formats(m3u8_url, display_id, 'mp4', entry_protocol='m3u8_native')
+
+        duration = int_or_none(duration or self._search_regex(
+            r'data-video-duration=(["\'])(?P<duartion>\d+)\1', webpage, 'duration'))
+        thumbnail = thumbnail or self._og_search_thumbnail(webpage)
+
+        like_count = int_or_none(self._search_regex(
+            (r'<span[^>]+data-comment-vote-count=["\'](\d+)',
+             r'<span[^>]+class=["\'].*?\blike(?:__|-)count\b.*?["\'][^>]*>\s*(\d+)'),
+            webpage, 'like count', fatal=False))
+
+        return {
+            'id': video_id,
+            'display_id': display_id,
+            'title': title,
+            'description': self._og_search_description(webpage),
+            'thumbnail': thumbnail,
+            'duration': duration,
+            'like_count': like_count,
+            'formats': formats,
+        }
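Everything in the new extractor hangs off the two named groups in _VALID_URL. A quick standalone check of that pattern against the first test URL from _TESTS above (needs nothing but the standard library):

# Verify the URL pattern extracts the video id and display id as expected.
import re

_VALID_URL = r'https?://(?:www\.)?vidio\.com/watch/(?P<id>\d+)-(?P<display_id>[^/?#&]+)'

mobj = re.match(_VALID_URL, 'http://www.vidio.com/watch/165683-dj_ambred-booyah-live-2015')
print(mobj.group('id'))          # 165683
print(mobj.group('display_id'))  # dj_ambred-booyah-live-2015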
@@ -344,6 +344,8 @@ class YoutubeIE(YoutubeBaseInfoExtractor):
         '139': {'ext': 'm4a', 'format_note': 'DASH audio', 'acodec': 'aac', 'abr': 48, 'preference': -50, 'container': 'm4a_dash'},
         '140': {'ext': 'm4a', 'format_note': 'DASH audio', 'acodec': 'aac', 'abr': 128, 'preference': -50, 'container': 'm4a_dash'},
         '141': {'ext': 'm4a', 'format_note': 'DASH audio', 'acodec': 'aac', 'abr': 256, 'preference': -50, 'container': 'm4a_dash'},
+        '256': {'ext': 'm4a', 'format_note': 'DASH audio', 'acodec': 'aac', 'preference': -50, 'container': 'm4a_dash'},
+        '258': {'ext': 'm4a', 'format_note': 'DASH audio', 'acodec': 'aac', 'preference': -50, 'container': 'm4a_dash'},
 
         # Dash webm
         '167': {'ext': 'webm', 'height': 360, 'width': 640, 'format_note': 'DASH video', 'container': 'webm', 'vcodec': 'vp8', 'preference': -40},