2014-11-02 10:36:29 +00:00
|
|
|
from __future__ import unicode_literals
|
|
|
|
|
2015-10-17 15:15:01 +00:00
|
|
|
import binascii
|
2015-02-28 20:42:16 +00:00
|
|
|
import collections
|
2015-10-17 15:15:01 +00:00
|
|
|
import email
|
2014-11-02 10:23:40 +00:00
|
|
|
import getpass
|
2015-10-17 15:15:01 +00:00
|
|
|
import io
|
2014-11-19 17:21:58 +00:00
|
|
|
import optparse
|
2014-11-02 10:23:40 +00:00
|
|
|
import os
|
2014-11-23 09:49:19 +00:00
|
|
|
import re
|
2015-09-05 15:40:44 +00:00
|
|
|
import shlex
|
2015-02-28 20:42:16 +00:00
|
|
|
import shutil
|
2015-01-10 18:55:36 +00:00
|
|
|
import socket
|
2014-11-02 10:23:40 +00:00
|
|
|
import subprocess
|
|
|
|
import sys
|
2015-07-10 10:58:12 +00:00
|
|
|
import itertools
|
2015-10-25 19:04:55 +00:00
|
|
|
import xml.etree.ElementTree
|
2014-11-02 10:23:40 +00:00
|
|
|
|
|
|
|
|
|
|
|
try:
|
|
|
|
import urllib.request as compat_urllib_request
|
2014-11-23 19:41:03 +00:00
|
|
|
except ImportError: # Python 2
|
2014-11-02 10:23:40 +00:00
|
|
|
import urllib2 as compat_urllib_request
|
|
|
|
|
|
|
|
try:
|
|
|
|
import urllib.error as compat_urllib_error
|
2014-11-23 19:41:03 +00:00
|
|
|
except ImportError: # Python 2
|
2014-11-02 10:23:40 +00:00
|
|
|
import urllib2 as compat_urllib_error
|
|
|
|
|
|
|
|
try:
|
|
|
|
import urllib.parse as compat_urllib_parse
|
2014-11-23 19:41:03 +00:00
|
|
|
except ImportError: # Python 2
|
2014-11-02 10:23:40 +00:00
|
|
|
import urllib as compat_urllib_parse
|
|
|
|
|
|
|
|
try:
|
|
|
|
from urllib.parse import urlparse as compat_urllib_parse_urlparse
|
2014-11-23 19:41:03 +00:00
|
|
|
except ImportError: # Python 2
|
2014-11-02 10:23:40 +00:00
|
|
|
from urlparse import urlparse as compat_urllib_parse_urlparse
|
|
|
|
|
|
|
|
try:
|
|
|
|
import urllib.parse as compat_urlparse
|
2014-11-23 19:41:03 +00:00
|
|
|
except ImportError: # Python 2
|
2014-11-02 10:23:40 +00:00
|
|
|
import urlparse as compat_urlparse
|
|
|
|
|
2015-10-17 15:15:01 +00:00
|
|
|
try:
|
|
|
|
import urllib.response as compat_urllib_response
|
|
|
|
except ImportError: # Python 2
|
|
|
|
import urllib as compat_urllib_response
|
|
|
|
|
2014-11-02 10:23:40 +00:00
|
|
|
try:
|
|
|
|
import http.cookiejar as compat_cookiejar
|
2014-11-23 19:41:03 +00:00
|
|
|
except ImportError: # Python 2
|
2014-11-02 10:23:40 +00:00
|
|
|
import cookielib as compat_cookiejar
|
|
|
|
|
2015-07-29 22:20:37 +00:00
|
|
|
try:
|
|
|
|
import http.cookies as compat_cookies
|
|
|
|
except ImportError: # Python 2
|
|
|
|
import Cookie as compat_cookies
|
|
|
|
|
2014-11-02 10:23:40 +00:00
|
|
|
try:
|
|
|
|
import html.entities as compat_html_entities
|
2014-11-23 19:41:03 +00:00
|
|
|
except ImportError: # Python 2
|
2014-11-02 10:23:40 +00:00
|
|
|
import htmlentitydefs as compat_html_entities
|
|
|
|
|
|
|
|
try:
|
|
|
|
import http.client as compat_http_client
|
2014-11-23 19:41:03 +00:00
|
|
|
except ImportError: # Python 2
|
2014-11-02 10:23:40 +00:00
|
|
|
import httplib as compat_http_client
|
|
|
|
|
|
|
|
try:
|
|
|
|
from urllib.error import HTTPError as compat_HTTPError
|
|
|
|
except ImportError: # Python 2
|
|
|
|
from urllib2 import HTTPError as compat_HTTPError
|
|
|
|
|
|
|
|
try:
|
|
|
|
from urllib.request import urlretrieve as compat_urlretrieve
|
|
|
|
except ImportError: # Python 2
|
|
|
|
from urllib import urlretrieve as compat_urlretrieve
|
|
|
|
|
2016-01-02 19:49:59 +00:00
|
|
|
try:
|
|
|
|
from html.parser import HTMLParser as compat_HTMLParser
|
|
|
|
except ImportError: # Python 2
|
|
|
|
from HTMLParser import HTMLParser as compat_HTMLParser
|
|
|
|
|
2014-11-02 10:23:40 +00:00
|
|
|
|
|
|
|
try:
|
|
|
|
from subprocess import DEVNULL
|
|
|
|
compat_subprocess_get_DEVNULL = lambda: DEVNULL
|
|
|
|
except ImportError:
|
|
|
|
compat_subprocess_get_DEVNULL = lambda: open(os.path.devnull, 'w')
|
|
|
|
|
2015-01-30 01:57:37 +00:00
|
|
|
try:
|
|
|
|
import http.server as compat_http_server
|
|
|
|
except ImportError:
|
|
|
|
import BaseHTTPServer as compat_http_server
|
|
|
|
|
2015-09-13 22:25:08 +00:00
|
|
|
try:
|
|
|
|
compat_str = unicode # Python 2
|
|
|
|
except NameError:
|
|
|
|
compat_str = str
|
|
|
|
|
2014-11-02 10:23:40 +00:00
|
|
|
try:
|
2015-07-17 16:58:13 +00:00
|
|
|
from urllib.parse import unquote_to_bytes as compat_urllib_parse_unquote_to_bytes
|
2014-11-02 10:23:40 +00:00
|
|
|
from urllib.parse import unquote as compat_urllib_parse_unquote
|
2015-07-17 17:28:34 +00:00
|
|
|
from urllib.parse import unquote_plus as compat_urllib_parse_unquote_plus
|
2015-07-17 16:58:13 +00:00
|
|
|
except ImportError: # Python 2
|
2015-07-18 12:32:52 +00:00
|
|
|
_asciire = (compat_urllib_parse._asciire if hasattr(compat_urllib_parse, '_asciire')
|
|
|
|
else re.compile('([\x00-\x7f]+)'))
|
2015-07-17 18:24:39 +00:00
|
|
|
|
2015-07-17 17:32:43 +00:00
|
|
|
# HACK: The following are the correct unquote_to_bytes, unquote and unquote_plus
|
2015-07-17 16:58:13 +00:00
|
|
|
# implementations from cpython 3.4.3's stdlib. Python 2's version
|
|
|
|
# is apparently broken (see https://github.com/rg3/youtube-dl/pull/6244)
|
|
|
|
|
2015-07-17 06:31:29 +00:00
|
|
|
    def compat_urllib_parse_unquote_to_bytes(string):
        """unquote_to_bytes('abc%20def') -> b'abc def'."""
        # Note: strings are encoded as UTF-8. This is only an issue if it contains
        # unescaped non-ASCII characters, which URIs should not.
        if not string:
            # Is it a string-like object? (attribute access raises
            # AttributeError for non-string input, mirroring CPython)
            string.split
            return b''
        if isinstance(string, compat_str):
            string = string.encode('utf-8')
        bits = string.split(b'%')
        if len(bits) == 1:
            # No percent-escapes at all: return the input unchanged.
            return string
        res = [bits[0]]
        append = res.append
        for item in bits[1:]:
            try:
                # _hextochr maps two hex digits to the corresponding byte.
                append(compat_urllib_parse._hextochr[item[:2]])
                append(item[2:])
            except KeyError:
                # Invalid escape: keep the literal '%' and the rest as-is.
                append(b'%')
                append(item)
        return b''.join(res)
|
|
|
|
|
2015-07-17 06:50:43 +00:00
|
|
|
    def compat_urllib_parse_unquote(string, encoding='utf-8', errors='replace'):
        """Replace %xx escapes by their single-character equivalent. The optional
        encoding and errors parameters specify how to decode percent-encoded
        sequences into Unicode characters, as accepted by the bytes.decode()
        method.
        By default, percent-encoded sequences are decoded with UTF-8, and invalid
        sequences are replaced by a placeholder character.

        unquote('abc%20def') -> 'abc def'.
        """
        if '%' not in string:
            # Attribute access doubles as a cheap "is this string-like?" check.
            string.split
            return string
        if encoding is None:
            encoding = 'utf-8'
        if errors is None:
            errors = 'replace'
        # _asciire's capturing group makes split() alternate non-ASCII/ASCII
        # runs; only the ASCII runs (odd indices) can contain %xx escapes.
        bits = _asciire.split(string)
        res = [bits[0]]
        append = res.append
        for i in range(1, len(bits), 2):
            append(compat_urllib_parse_unquote_to_bytes(bits[i]).decode(encoding, errors))
            append(bits[i + 1])
        return ''.join(res)
|
|
|
|
|
2015-07-17 17:28:34 +00:00
|
|
|
def compat_urllib_parse_unquote_plus(string, encoding='utf-8', errors='replace'):
|
|
|
|
"""Like unquote(), but also replace plus signs by spaces, as required for
|
|
|
|
unquoting HTML form values.
|
|
|
|
|
|
|
|
unquote_plus('%7e/abc+def') -> '~/abc def'
|
|
|
|
"""
|
|
|
|
string = string.replace('+', ' ')
|
|
|
|
return compat_urllib_parse_unquote(string, encoding, errors)
|
|
|
|
|
2016-03-25 19:46:57 +00:00
|
|
|
try:
|
|
|
|
from urllib.parse import urlencode as compat_urllib_parse_urlencode
|
|
|
|
except ImportError: # Python 2
|
|
|
|
# Python 2 will choke in urlencode on mixture of byte and unicode strings.
|
|
|
|
# Possible solutions are to either port it from python 3 with all
|
|
|
|
# the friends or manually ensure input query contains only byte strings.
|
|
|
|
# We will stick with latter thus recursively encoding the whole query.
|
|
|
|
def compat_urllib_parse_urlencode(query, doseq=0, encoding='utf-8'):
|
|
|
|
def encode_elem(e):
|
|
|
|
if isinstance(e, dict):
|
|
|
|
e = encode_dict(e)
|
|
|
|
elif isinstance(e, (list, tuple,)):
|
2016-04-06 10:29:54 +00:00
|
|
|
list_e = encode_list(e)
|
|
|
|
e = tuple(list_e) if isinstance(e, tuple) else list_e
|
2016-03-25 19:46:57 +00:00
|
|
|
elif isinstance(e, compat_str):
|
|
|
|
e = e.encode(encoding)
|
|
|
|
return e
|
|
|
|
|
|
|
|
def encode_dict(d):
|
|
|
|
return dict((encode_elem(k), encode_elem(v)) for k, v in d.items())
|
|
|
|
|
|
|
|
def encode_list(l):
|
|
|
|
return [encode_elem(e) for e in l]
|
|
|
|
|
|
|
|
return compat_urllib_parse.urlencode(encode_elem(query), doseq=doseq)
|
|
|
|
|
2015-10-17 15:15:01 +00:00
|
|
|
try:
|
|
|
|
from urllib.request import DataHandler as compat_urllib_request_DataHandler
|
|
|
|
except ImportError: # Python < 3.4
|
|
|
|
# Ported from CPython 98774:1733b3bd46db, Lib/urllib/request.py
|
|
|
|
    class compat_urllib_request_DataHandler(compat_urllib_request.BaseHandler):
        """urllib handler for data: URLs (RFC 2397); backport for Python < 3.4."""

        def data_open(self, req):
            # data URLs as specified in RFC 2397.
            #
            # ignores POSTed data
            #
            # syntax:
            # dataurl := "data:" [ mediatype ] [ ";base64" ] "," data
            # mediatype := [ type "/" subtype ] *( ";" parameter )
            # data := *urlchar
            # parameter := attribute "=" value
            url = req.get_full_url()

            scheme, data = url.split(':', 1)
            mediatype, data = data.split(',', 1)

            # even base64 encoded data URLs might be quoted so unquote in any case:
            data = compat_urllib_parse_unquote_to_bytes(data)
            if mediatype.endswith(';base64'):
                data = binascii.a2b_base64(data)
                mediatype = mediatype[:-7]

            if not mediatype:
                # RFC 2397 default media type when none is given.
                mediatype = 'text/plain;charset=US-ASCII'

            headers = email.message_from_string(
                'Content-type: %s\nContent-length: %d\n' % (mediatype, len(data)))

            return compat_urllib_response.addinfourl(io.BytesIO(data), headers, url)
|
|
|
|
|
2015-02-01 10:30:56 +00:00
|
|
|
try:
|
2015-02-01 10:36:59 +00:00
|
|
|
compat_basestring = basestring # Python 2
|
2015-02-01 10:30:56 +00:00
|
|
|
except NameError:
|
2015-02-01 10:36:59 +00:00
|
|
|
compat_basestring = str
|
2015-02-01 10:30:56 +00:00
|
|
|
|
|
|
|
try:
|
|
|
|
compat_chr = unichr # Python 2
|
|
|
|
except NameError:
|
|
|
|
compat_chr = chr
|
|
|
|
|
|
|
|
try:
|
|
|
|
from xml.etree.ElementTree import ParseError as compat_xml_parse_error
|
|
|
|
except ImportError: # Python 2.6
|
|
|
|
from xml.parsers.expat import ExpatError as compat_xml_parse_error
|
|
|
|
|
2015-10-25 19:04:55 +00:00
|
|
|
if sys.version_info[0] >= 3:
    compat_etree_fromstring = xml.etree.ElementTree.fromstring
else:
    # python 2.x tries to encode unicode strings with ascii (see the
    # XMLParser._fixtext method)
    etree = xml.etree.ElementTree

    try:
        _etree_iter = etree.Element.iter
    except AttributeError:  # Python <=2.6
        def _etree_iter(root):
            # Recursive depth-first walk over all descendant elements,
            # mimicking Element.iter (added in 2.7).
            for el in root.findall('*'):
                yield el
                for sub in _etree_iter(el):
                    yield sub

    # on 2.6 XML doesn't have a parser argument, function copied from CPython
    # 2.7 source
    def _XML(text, parser=None):
        if not parser:
            parser = etree.XMLParser(target=etree.TreeBuilder())
        parser.feed(text)
        return parser.close()

    def _element_factory(*args, **kwargs):
        # Decode byte-string attribute values to unicode as elements are built.
        el = etree.Element(*args, **kwargs)
        for k, v in el.items():
            if isinstance(v, bytes):
                el.set(k, v.decode('utf-8'))
        return el

    def compat_etree_fromstring(text):
        """etree.fromstring that returns unicode attribute values and text."""
        doc = _XML(text, parser=etree.XMLParser(target=etree.TreeBuilder(element_factory=_element_factory)))
        for el in _etree_iter(doc):
            if el.text is not None and isinstance(el.text, bytes):
                el.text = el.text.decode('utf-8')
        return doc
|
2014-11-02 10:23:40 +00:00
|
|
|
|
2016-03-17 20:51:38 +00:00
|
|
|
if sys.version_info < (2, 7):
    # Here comes the crazy part: In 2.6, if the xpath is a unicode,
    # .//node does not match if a node is a direct child of . !
    def compat_xpath(xpath):
        """Coerce a text xpath to a byte string on Python 2.6."""
        return xpath.encode('ascii') if isinstance(xpath, compat_str) else xpath
else:
    def compat_xpath(xpath):
        """Identity on Python >= 2.7, where xpaths work as-is."""
        return xpath
|
|
|
|
|
2014-11-02 10:23:40 +00:00
|
|
|
try:
|
|
|
|
from urllib.parse import parse_qs as compat_parse_qs
|
2014-11-23 19:41:03 +00:00
|
|
|
except ImportError: # Python 2
|
2014-11-02 10:23:40 +00:00
|
|
|
# HACK: The following is the correct parse_qs implementation from cpython 3's stdlib.
|
|
|
|
# Python 2's version is apparently totally broken
|
|
|
|
|
|
|
|
    def _parse_qsl(qs, keep_blank_values=False, strict_parsing=False,
                   encoding='utf-8', errors='replace'):
        """Parse a query string into a list of (name, value) pairs
        (port of cpython 3's urllib.parse.parse_qsl)."""
        qs, _coerce_result = qs, compat_str
        # Field pairs may be separated by either '&' or ';'.
        pairs = [s2 for s1 in qs.split('&') for s2 in s1.split(';')]
        r = []
        for name_value in pairs:
            if not name_value and not strict_parsing:
                continue
            nv = name_value.split('=', 1)
            if len(nv) != 2:
                if strict_parsing:
                    raise ValueError('bad query field: %r' % (name_value,))
                # Handle case of a control-name with no equal sign
                if keep_blank_values:
                    nv.append('')
                else:
                    continue
            if len(nv[1]) or keep_blank_values:
                # '+' encodes a space in form data; decode before unquoting.
                name = nv[0].replace('+', ' ')
                name = compat_urllib_parse_unquote(
                    name, encoding=encoding, errors=errors)
                name = _coerce_result(name)
                value = nv[1].replace('+', ' ')
                value = compat_urllib_parse_unquote(
                    value, encoding=encoding, errors=errors)
                value = _coerce_result(value)
                r.append((name, value))
        return r
|
|
|
|
|
|
|
|
def compat_parse_qs(qs, keep_blank_values=False, strict_parsing=False,
|
2014-11-23 20:39:15 +00:00
|
|
|
encoding='utf-8', errors='replace'):
|
2014-11-02 10:23:40 +00:00
|
|
|
parsed_result = {}
|
|
|
|
pairs = _parse_qsl(qs, keep_blank_values, strict_parsing,
|
2014-11-23 20:39:15 +00:00
|
|
|
encoding=encoding, errors=errors)
|
2014-11-02 10:23:40 +00:00
|
|
|
for name, value in pairs:
|
|
|
|
if name in parsed_result:
|
|
|
|
parsed_result[name].append(value)
|
|
|
|
else:
|
|
|
|
parsed_result[name] = [value]
|
|
|
|
return parsed_result
|
|
|
|
|
|
|
|
try:
    from shlex import quote as shlex_quote
except ImportError:  # Python < 3.3
    def shlex_quote(s):
        """Return a shell-escaped version of *s* (shlex.quote backport)."""
        if re.match(r'^[-_\w./]+$', s):
            # Entirely safe characters: no quoting needed.
            return s
        # Single-quote the string; each embedded single quote becomes '"'"'
        return "'" + s.replace("'", "'\"'\"'") + "'"
|
2014-11-02 10:23:40 +00:00
|
|
|
|
|
|
|
|
2015-09-05 16:21:06 +00:00
|
|
|
if sys.version_info >= (2, 7, 3):
    compat_shlex_split = shlex.split
else:
    # Working around shlex issue with unicode strings on some python 2
    # versions (see http://bugs.python.org/issue1548891)
    def compat_shlex_split(s, comments=False, posix=True):
        """shlex.split that first encodes unicode input to UTF-8 bytes."""
        encoded = s.encode('utf-8') if isinstance(s, compat_str) else s
        return shlex.split(encoded, comments, posix)
|
|
|
|
|
|
|
|
|
2014-11-02 10:23:40 +00:00
|
|
|
def compat_ord(c):
    """Return the integer value of *c*.

    Accepts either an int (e.g. an element of a Python 3 bytes object,
    returned unchanged) or a single character/byte, which goes through ord().
    """
    # `type(c) is int` (not isinstance) preserves the original behaviour
    # for int subclasses such as bool, which still go through ord().
    return c if type(c) is int else ord(c)
|
2014-11-02 10:23:40 +00:00
|
|
|
|
|
|
|
|
2016-03-03 11:24:24 +00:00
|
|
|
compat_os_name = os._name if os.name == 'java' else os.name
|
|
|
|
|
|
|
|
|
2014-11-02 10:23:40 +00:00
|
|
|
if sys.version_info >= (3, 0):
|
|
|
|
compat_getenv = os.getenv
|
|
|
|
compat_expanduser = os.path.expanduser
|
|
|
|
else:
|
|
|
|
# Environment variables should be decoded with filesystem encoding.
|
|
|
|
# Otherwise it will fail if any non-ASCII characters present (see #3854 #3217 #2918)
|
|
|
|
|
|
|
|
    def compat_getenv(key, default=None):
        """os.getenv that decodes the value with the filesystem encoding.

        Python 2's os.getenv returns byte strings; non-ASCII values would
        otherwise fail later (see #3854 #3217 #2918).
        """
        from .utils import get_filesystem_encoding
        env = os.getenv(key, default)
        if env:
            # Only decode non-empty values; None/'' pass through unchanged.
            env = env.decode(get_filesystem_encoding())
        return env
|
|
|
|
|
|
|
|
# HACK: The default implementations of os.path.expanduser from cpython do not decode
|
|
|
|
# environment variables with filesystem encoding. We will work around this by
|
|
|
|
# providing adjusted implementations.
|
|
|
|
# The following are os.path.expanduser implementations from cpython 2.7.8 stdlib
|
|
|
|
# for different platforms with correct environment variables decoding.
|
|
|
|
|
2016-03-03 11:24:24 +00:00
|
|
|
if compat_os_name == 'posix':
|
2014-11-02 10:23:40 +00:00
|
|
|
        def compat_expanduser(path):
            """Expand ~ and ~user constructions. If user or $HOME is unknown,
            do nothing."""
            if not path.startswith('~'):
                return path
            # Find the end of the ~user part: first '/', or end of string.
            i = path.find('/', 1)
            if i < 0:
                i = len(path)
            if i == 1:
                # Bare '~': use $HOME, falling back to the passwd database.
                if 'HOME' not in os.environ:
                    import pwd
                    userhome = pwd.getpwuid(os.getuid()).pw_dir
                else:
                    userhome = compat_getenv('HOME')
            else:
                # '~user': look the named user up in the passwd database.
                import pwd
                try:
                    pwent = pwd.getpwnam(path[1:i])
                except KeyError:
                    # Unknown user: leave the path untouched.
                    return path
                userhome = pwent.pw_dir
            userhome = userhome.rstrip('/')
            return (userhome + path[i:]) or '/'
|
2016-03-03 11:24:24 +00:00
|
|
|
elif compat_os_name == 'nt' or compat_os_name == 'ce':
|
2014-11-02 10:23:40 +00:00
|
|
|
        def compat_expanduser(path):
            """Expand ~ and ~user constructs.

            If user or $HOME is unknown, do nothing."""
            if path[:1] != '~':
                return path
            # Scan past the optional user name: stop at the first separator.
            i, n = 1, len(path)
            while i < n and path[i] not in '/\\':
                i = i + 1

            if 'HOME' in os.environ:
                userhome = compat_getenv('HOME')
            elif 'USERPROFILE' in os.environ:
                userhome = compat_getenv('USERPROFILE')
            elif 'HOMEPATH' not in os.environ:
                return path
            else:
                try:
                    drive = compat_getenv('HOMEDRIVE')
                except KeyError:
                    # NOTE(review): compat_getenv returns a default rather than
                    # raising KeyError, so this branch looks unreachable and a
                    # missing HOMEDRIVE yields None, not '' — confirm intent.
                    drive = ''
                userhome = os.path.join(drive, compat_getenv('HOMEPATH'))

            if i != 1:  # ~user
                userhome = os.path.join(os.path.dirname(userhome), path[1:i])

            return userhome + path[i:]
|
|
|
|
else:
|
|
|
|
compat_expanduser = os.path.expanduser
|
|
|
|
|
|
|
|
|
|
|
|
if sys.version_info < (3, 0):
    def compat_print(s):
        """print() a unicode string, encoded for the output terminal.

        'xmlcharrefreplace' turns unencodable characters into &#...;
        references instead of raising UnicodeEncodeError.
        """
        from .utils import preferredencoding
        print(s.encode(preferredencoding(), 'xmlcharrefreplace'))
else:
    def compat_print(s):
        # On Python 3 printing text is safe; just enforce that callers
        # pass str, not bytes.
        assert isinstance(s, compat_str)
        print(s)
|
|
|
|
|
|
|
|
|
|
|
|
try:
    subprocess_check_output = subprocess.check_output
except AttributeError:
    # Python 2.6 has no subprocess.check_output; provide a minimal backport.
    def subprocess_check_output(*args, **kwargs):
        """Run a command and return its stdout, raising CalledProcessError
        on a non-zero exit status (subprocess.check_output backport)."""
        assert 'input' not in kwargs
        p = subprocess.Popen(*args, stdout=subprocess.PIPE, **kwargs)
        output, _ = p.communicate()
        ret = p.poll()
        if ret:
            # BUGFIX: Popen objects only gained an .args attribute in Python
            # 3.3, so on the interpreters that actually use this fallback the
            # previous `p.args` lookup raised AttributeError instead of
            # reporting the failed command. Recover the command from the
            # arguments this wrapper was called with.
            cmd = kwargs.get('args', args[0] if args else None)
            raise subprocess.CalledProcessError(ret, cmd, output=output)
        return output
|
|
|
|
|
|
|
|
if sys.version_info < (3, 0) and sys.platform == 'win32':
    def compat_getpass(prompt, *args, **kwargs):
        """getpass.getpass that tolerates unicode prompts on Windows/Python 2
        by encoding them with the preferred console encoding first."""
        if isinstance(prompt, compat_str):
            from .utils import preferredencoding
            prompt = prompt.encode(preferredencoding())
        return getpass.getpass(prompt, *args, **kwargs)
else:
    compat_getpass = getpass.getpass
|
|
|
|
|
2016-01-16 14:17:31 +00:00
|
|
|
# Python < 2.6.5 require kwargs to be bytes
|
2014-11-15 14:17:19 +00:00
|
|
|
try:
    # Probe whether **kwargs with unicode keys is accepted; with
    # unicode_literals in effect, 'x' is a unicode string here.
    def _testfunc(x):
        pass
    _testfunc(**{'x': 0})
except TypeError:
    # Python < 2.6.5: keyword names must be byte strings.
    def compat_kwargs(kwargs):
        return dict((bytes(key), value) for key, value in kwargs.items())
else:
    compat_kwargs = lambda kwargs: kwargs
|
2014-11-02 10:23:40 +00:00
|
|
|
|
2014-11-19 17:21:58 +00:00
|
|
|
|
2015-01-10 18:55:36 +00:00
|
|
|
if sys.version_info < (2, 7):
    def compat_socket_create_connection(address, timeout, source_address=None):
        """Backport of socket.create_connection with source_address support
        (the source_address parameter was added in Python 2.7).

        Tries every address returned by getaddrinfo until one connects;
        re-raises the last socket.error if all of them fail.
        """
        host, port = address
        err = None
        for res in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM):
            af, socktype, proto, canonname, sa = res
            sock = None
            try:
                sock = socket.socket(af, socktype, proto)
                sock.settimeout(timeout)
                if source_address:
                    sock.bind(source_address)
                sock.connect(sa)
                return sock
            except socket.error as _:
                err = _
                # Close the half-created socket before trying the next address.
                if sock is not None:
                    sock.close()
        if err is not None:
            raise err
        else:
            raise socket.error('getaddrinfo returns an empty list')
else:
    compat_socket_create_connection = socket.create_connection
|
|
|
|
|
|
|
|
|
2014-11-19 17:21:58 +00:00
|
|
|
# Fix https://github.com/rg3/youtube-dl/issues/4223
|
|
|
|
# See http://bugs.python.org/issue9161 for what is broken
|
|
|
|
def workaround_optparse_bug9161():
    """Monkey-patch optparse.OptionGroup.add_option on interpreters where it
    rejects unicode arguments (Python bug #9161, youtube-dl issue #4223).

    The replacement encodes every text argument to ASCII (errors='replace')
    before delegating to the original add_option.
    """
    op = optparse.OptionParser()
    og = optparse.OptionGroup(op, 'foo')
    try:
        # Probe: on affected interpreters this raises TypeError for unicode.
        og.add_option('-t')
    except TypeError:
        real_add_option = optparse.OptionGroup.add_option

        def _compat_add_option(self, *args, **kwargs):
            enc = lambda v: (
                v.encode('ascii', 'replace') if isinstance(v, compat_str)
                else v)
            bargs = [enc(a) for a in args]
            bkwargs = dict(
                (k, enc(v)) for k, v in kwargs.items())
            return real_add_option(self, *bargs, **bkwargs)
        optparse.OptionGroup.add_option = _compat_add_option
|
|
|
|
|
2015-02-28 20:42:16 +00:00
|
|
|
if hasattr(shutil, 'get_terminal_size'):  # Python >= 3.3
    compat_get_terminal_size = shutil.get_terminal_size
else:
    _terminal_size = collections.namedtuple('terminal_size', ['columns', 'lines'])

    def compat_get_terminal_size(fallback=(80, 24)):
        """Backport of shutil.get_terminal_size: consult $COLUMNS/$LINES
        first, then `stty size`, then the given fallback."""
        columns = compat_getenv('COLUMNS')
        if columns:
            columns = int(columns)
        else:
            columns = None
        lines = compat_getenv('LINES')
        if lines:
            lines = int(lines)
        else:
            lines = None

        if columns is None or lines is None or columns <= 0 or lines <= 0:
            try:
                sp = subprocess.Popen(
                    ['stty', 'size'],
                    stdout=subprocess.PIPE, stderr=subprocess.PIPE)
                out, err = sp.communicate()
                # stty prints "rows columns", hence lines before columns.
                _lines, _columns = map(int, out.split())
            except Exception:
                _columns, _lines = _terminal_size(*fallback)

            # Environment values win when valid; fill gaps from stty/fallback.
            if columns is None or columns <= 0:
                columns = _columns
            if lines is None or lines <= 0:
                lines = _lines
        return _terminal_size(columns, lines)
|
|
|
|
|
2015-07-10 10:58:12 +00:00
|
|
|
try:
    # Keyword arguments to itertools.count appeared in Python 2.7.
    itertools.count(start=0, step=1)
    compat_itertools_count = itertools.count
except TypeError:  # Python 2.6
    def compat_itertools_count(start=0, step=1):
        """Generator equivalent of itertools.count(start, step)."""
        current = start
        while True:
            yield current
            current += step
|
2014-11-19 17:21:58 +00:00
|
|
|
|
2015-06-28 20:08:29 +00:00
|
|
|
if sys.version_info >= (3, 0):
|
|
|
|
from tokenize import tokenize as compat_tokenize_tokenize
|
|
|
|
else:
|
|
|
|
from tokenize import generate_tokens as compat_tokenize_tokenize
|
2014-11-19 17:21:58 +00:00
|
|
|
|
2014-11-02 10:23:40 +00:00
|
|
|
__all__ = [
|
2016-01-02 19:49:59 +00:00
|
|
|
'compat_HTMLParser',
|
2014-11-02 10:23:40 +00:00
|
|
|
'compat_HTTPError',
|
2015-02-01 10:36:59 +00:00
|
|
|
'compat_basestring',
|
2014-11-02 10:23:40 +00:00
|
|
|
'compat_chr',
|
|
|
|
'compat_cookiejar',
|
2015-07-29 22:20:37 +00:00
|
|
|
'compat_cookies',
|
2015-10-25 19:04:55 +00:00
|
|
|
'compat_etree_fromstring',
|
2014-11-02 10:23:40 +00:00
|
|
|
'compat_expanduser',
|
2015-02-28 20:42:16 +00:00
|
|
|
'compat_get_terminal_size',
|
2014-11-02 10:23:40 +00:00
|
|
|
'compat_getenv',
|
|
|
|
'compat_getpass',
|
|
|
|
'compat_html_entities',
|
|
|
|
'compat_http_client',
|
2015-01-30 01:57:37 +00:00
|
|
|
'compat_http_server',
|
2015-07-10 10:58:12 +00:00
|
|
|
'compat_itertools_count',
|
2014-11-15 14:17:19 +00:00
|
|
|
'compat_kwargs',
|
2014-11-02 10:23:40 +00:00
|
|
|
'compat_ord',
|
2016-03-03 11:24:24 +00:00
|
|
|
'compat_os_name',
|
2014-11-02 10:23:40 +00:00
|
|
|
'compat_parse_qs',
|
|
|
|
'compat_print',
|
2015-09-05 15:40:44 +00:00
|
|
|
'compat_shlex_split',
|
2015-01-10 18:55:36 +00:00
|
|
|
'compat_socket_create_connection',
|
2015-01-11 09:13:03 +00:00
|
|
|
'compat_str',
|
2014-11-02 10:23:40 +00:00
|
|
|
'compat_subprocess_get_DEVNULL',
|
2015-06-28 20:08:29 +00:00
|
|
|
'compat_tokenize_tokenize',
|
2014-11-02 10:23:40 +00:00
|
|
|
'compat_urllib_error',
|
|
|
|
'compat_urllib_parse',
|
|
|
|
'compat_urllib_parse_unquote',
|
2015-07-17 17:28:34 +00:00
|
|
|
'compat_urllib_parse_unquote_plus',
|
2015-07-17 12:24:07 +00:00
|
|
|
'compat_urllib_parse_unquote_to_bytes',
|
2016-03-25 19:46:57 +00:00
|
|
|
'compat_urllib_parse_urlencode',
|
2014-11-02 10:23:40 +00:00
|
|
|
'compat_urllib_parse_urlparse',
|
|
|
|
'compat_urllib_request',
|
2015-10-17 15:15:01 +00:00
|
|
|
'compat_urllib_request_DataHandler',
|
|
|
|
'compat_urllib_response',
|
2014-11-02 10:23:40 +00:00
|
|
|
'compat_urlparse',
|
|
|
|
'compat_urlretrieve',
|
|
|
|
'compat_xml_parse_error',
|
2016-03-17 20:51:38 +00:00
|
|
|
'compat_xpath',
|
2014-11-02 10:23:40 +00:00
|
|
|
'shlex_quote',
|
|
|
|
'subprocess_check_output',
|
2014-11-19 17:21:58 +00:00
|
|
|
'workaround_optparse_bug9161',
|
2014-11-02 10:23:40 +00:00
|
|
|
]
|