upgrade wakatime-cli to v4.1.11
parent 90731146f9
commit 3ce8f388ab
14 changed files with 139 additions and 81 deletions
@@ -1,7 +1,7 @@
 __title__ = 'wakatime'
 __description__ = 'Common interface to the WakaTime api.'
 __url__ = 'https://github.com/wakatime/wakatime'
-__version_info__ = ('4', '1', '10')
+__version_info__ = ('4', '1', '11')
 __version__ = '.'.join(__version_info__)
 __author__ = 'Alan Hamlett'
 __author_email__ = 'alan@wakatime.com'
@@ -13,3 +13,4 @@
 SUCCESS = 0
 API_ERROR = 102
 CONFIG_FILE_PARSE_ERROR = 103
+AUTH_ERROR = 104
@@ -73,7 +73,14 @@ class JsonFormatter(logging.Formatter):
 
 
 def traceback_formatter(*args, **kwargs):
-    logging.getLogger('WakaTime').error(traceback.format_exc())
+    if 'level' in kwargs and (kwargs['level'].lower() == 'warn' or kwargs['level'].lower() == 'warning'):
+        logging.getLogger('WakaTime').warning(traceback.format_exc())
+    elif 'level' in kwargs and kwargs['level'].lower() == 'info':
+        logging.getLogger('WakaTime').info(traceback.format_exc())
+    elif 'level' in kwargs and kwargs['level'].lower() == 'debug':
+        logging.getLogger('WakaTime').debug(traceback.format_exc())
+    else:
+        logging.getLogger('WakaTime').error(traceback.format_exc())
 
 
 def set_log_level(logger, args):
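The traceback helper can now route the formatted traceback to a chosen log level via a `level` keyword instead of always logging at ERROR. A minimal usage sketch (the import path is an assumption about how the embedded package is laid out):

import logging

from wakatime.logger import traceback_formatter  # import path is an assumption

logging.basicConfig(level=logging.DEBUG)

try:
    open('/nonexistent/path')
except IOError:
    # routes the formatted traceback to the WakaTime logger at DEBUG instead of ERROR
    traceback_formatter(level='debug')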
@@ -25,12 +25,18 @@ try:
 except ImportError:  # pragma: nocover
     import configparser
 
-sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
-sys.path.insert(0, os.path.join(os.path.dirname(os.path.abspath(__file__)), 'packages'))
+pwd = os.path.dirname(os.path.abspath(__file__))
+sys.path.insert(0, os.path.dirname(pwd))
+sys.path.insert(0, os.path.join(pwd, 'packages'))
 
 from .__about__ import __version__
 from .compat import u, open, is_py3
-from .constants import SUCCESS, API_ERROR, CONFIG_FILE_PARSE_ERROR
+from .constants import (
+    API_ERROR,
+    AUTH_ERROR,
+    CONFIG_FILE_PARSE_ERROR,
+    SUCCESS,
+)
 from .logger import setup_logging
 from .offlinequeue import Queue
 from .packages import argparse
@@ -123,12 +129,14 @@ def parseArguments():
                              '"url", "domain", or "app"; defaults to file.')
     parser.add_argument('--proxy', dest='proxy',
                         help='optional https proxy url; for example: '+
                             'https://user:pass@localhost:8080')
     parser.add_argument('--project', dest='project',
                         help='optional project name')
     parser.add_argument('--alternate-project', dest='alternate_project',
-                        help='optional alternate project name; auto-discovered project takes priority')
-    parser.add_argument('--hostname', dest='hostname', help='hostname of current machine.')
+                        help='optional alternate project name; auto-discovered project '+
+                             'takes priority')
+    parser.add_argument('--hostname', dest='hostname', help='hostname of '+
+                        'current machine.')
     parser.add_argument('--disableoffline', dest='offline',
                         action='store_false',
                         help='disables offline time logging instead of queuing logged time')
@@ -285,10 +293,14 @@ def get_user_agent(plugin):
     return user_agent
 
 
-def send_heartbeat(project=None, branch=None, hostname=None, stats={}, key=None, entity=None,
-                   timestamp=None, isWrite=None, plugin=None, offline=None, entity_type='file',
-                   hidefilenames=None, proxy=None, api_url=None, timeout=None, **kwargs):
+def send_heartbeat(project=None, branch=None, hostname=None, stats={}, key=None,
+                   entity=None, timestamp=None, isWrite=None, plugin=None,
+                   offline=None, entity_type='file', hidefilenames=None,
+                   proxy=None, api_url=None, timeout=None, **kwargs):
     """Sends heartbeat as POST request to WakaTime api server.
+
+    Returns `SUCCESS` when heartbeat was sent, otherwise returns an
+    error code constant.
     """
 
     if not api_url:
@@ -333,7 +345,7 @@ def send_heartbeat(project=None, branch=None, hostname=None, stats={}, key=None,
         'Authorization': auth,
     }
     if hostname:
-        headers['X-Machine-Name'] = hostname
+        headers['X-Machine-Name'] = u(hostname)
     proxies = {}
     if proxy:
         proxies['https'] = proxy
|
||||||
response = None
|
response = None
|
||||||
try:
|
try:
|
||||||
response = session.post(api_url, data=request_body, headers=headers,
|
response = session.post(api_url, data=request_body, headers=headers,
|
||||||
proxies=proxies, timeout=timeout)
|
proxies=proxies, timeout=timeout)
|
||||||
except RequestException:
|
except RequestException:
|
||||||
exception_data = {
|
exception_data = {
|
||||||
sys.exc_info()[0].__name__: u(sys.exc_info()[1]),
|
sys.exc_info()[0].__name__: u(sys.exc_info()[1]),
|
||||||
|
@@ -368,40 +380,74 @@ def send_heartbeat(project=None, branch=None, hostname=None, stats={}, key=None,
         else:
             log.error(exception_data)
     else:
-        response_code = response.status_code if response is not None else None
-        response_content = response.text if response is not None else None
-        if response_code == requests.codes.created or response_code == requests.codes.accepted:
+        code = response.status_code if response is not None else None
+        content = response.text if response is not None else None
+        if code == requests.codes.created or code == requests.codes.accepted:
             log.debug({
-                'response_code': response_code,
+                'response_code': code,
             })
             session_cache.save(session)
-            return True
+            return SUCCESS
         if offline:
-            if response_code != 400:
+            if code != 400:
                 queue = Queue()
                 queue.push(data, json.dumps(stats), plugin)
-                if response_code == 401:
+                if code == 401:
                     log.error({
-                        'response_code': response_code,
-                        'response_content': response_content,
+                        'response_code': code,
+                        'response_content': content,
                     })
+                    session_cache.delete()
+                    return AUTH_ERROR
                 elif log.isEnabledFor(logging.DEBUG):
                     log.warn({
-                        'response_code': response_code,
-                        'response_content': response_content,
+                        'response_code': code,
+                        'response_content': content,
                     })
             else:
                 log.error({
-                    'response_code': response_code,
-                    'response_content': response_content,
+                    'response_code': code,
+                    'response_content': content,
                 })
         else:
             log.error({
-                'response_code': response_code,
-                'response_content': response_content,
+                'response_code': code,
+                'response_content': content,
             })
         session_cache.delete()
-        return False
+        return API_ERROR
+
+
+def sync_offline_heartbeats(args, hostname):
+    """Sends all heartbeats which were cached in the offline Queue."""
+
+    queue = Queue()
+    while True:
+        heartbeat = queue.pop()
+        if heartbeat is None:
+            break
+        status = send_heartbeat(
+            project=heartbeat['project'],
+            entity=heartbeat['entity'],
+            timestamp=heartbeat['time'],
+            branch=heartbeat['branch'],
+            hostname=hostname,
+            stats=json.loads(heartbeat['stats']),
+            key=args.key,
+            isWrite=heartbeat['is_write'],
+            plugin=heartbeat['plugin'],
+            offline=args.offline,
+            hidefilenames=args.hidefilenames,
+            entity_type=heartbeat['type'],
+            proxy=args.proxy,
+            api_url=args.api_url,
+            timeout=args.timeout,
+        )
+        if status != SUCCESS:
+            if status == AUTH_ERROR:
+                return AUTH_ERROR
+            break
+    return SUCCESS
 
 
 def execute(argv=None):
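With this change send_heartbeat reports its outcome through the exit-code constants instead of True/False, and the new sync_offline_heartbeats stops flushing the queue as soon as a heartbeat fails (returning AUTH_ERROR immediately on a 401). A hedged sketch of how a caller can branch on the constants; the argument values and import paths below are placeholders, not taken from the commit:

from wakatime.constants import API_ERROR, AUTH_ERROR, SUCCESS
from wakatime.main import send_heartbeat  # import paths are assumptions

# placeholder arguments; real values come from parseArguments() and the stats
# gathered for the current file
status = send_heartbeat(entity='/tmp/example.py', timestamp=1454000000.0,
                        key='my-api-key', plugin='editor-wakatime/1.0.0')

if status == SUCCESS:
    print('heartbeat accepted')
elif status == AUTH_ERROR:
    print('api key rejected (401); ask the user to re-enter it')
else:
    print('api or network error; heartbeat stays in the offline queue if enabled')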
@@ -438,37 +484,15 @@ def execute(argv=None):
         kwargs['project'] = project
         kwargs['branch'] = branch
         kwargs['stats'] = stats
-        kwargs['hostname'] = args.hostname or socket.gethostname()
+        hostname = args.hostname or socket.gethostname()
+        kwargs['hostname'] = hostname
         kwargs['timeout'] = args.timeout
 
-        if send_heartbeat(**kwargs):
-            queue = Queue()
-            while True:
-                heartbeat = queue.pop()
-                if heartbeat is None:
-                    break
-                sent = send_heartbeat(
-                    project=heartbeat['project'],
-                    entity=heartbeat['entity'],
-                    timestamp=heartbeat['time'],
-                    branch=heartbeat['branch'],
-                    hostname=kwargs['hostname'],
-                    stats=json.loads(heartbeat['stats']),
-                    key=args.key,
-                    isWrite=heartbeat['is_write'],
-                    plugin=heartbeat['plugin'],
-                    offline=args.offline,
-                    hidefilenames=args.hidefilenames,
-                    entity_type=heartbeat['type'],
-                    proxy=args.proxy,
-                    api_url=args.api_url,
-                    timeout=args.timeout,
-                )
-                if not sent:
-                    break
-            return SUCCESS
-
-        return API_ERROR
+        status = send_heartbeat(**kwargs)
+        if status == SUCCESS:
+            return sync_offline_heartbeats(args, hostname)
+        else:
+            return status
 
     else:
         log.debug('File does not exist; ignoring this heartbeat.')
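execute() now returns one of the constants directly, so the value becomes the process exit status of wakatime-cli. A rough sketch of how an editor plugin might react to the new 104 code; the command line shown is illustrative, not the exact invocation used by the plugins:

import subprocess

# illustrative invocation; the plugins build this argument list from editor state
exit_code = subprocess.call([
    'python', '-m', 'wakatime',
    '--entity', '/tmp/example.py',
    '--plugin', 'editor-wakatime/1.0.0',
])

if exit_code == 104:    # AUTH_ERROR: the api key was rejected with a 401
    print('WakaTime: invalid api key')
elif exit_code == 102:  # API_ERROR: transient failure; heartbeat queued offline
    print('WakaTime: api error, will retry later')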
@@ -46,7 +46,7 @@ __version__ = '2.9.1'
 __build__ = 0x020901
 __author__ = 'Kenneth Reitz'
 __license__ = 'Apache 2.0'
-__copyright__ = 'Copyright 2015 Kenneth Reitz'
+__copyright__ = 'Copyright 2016 Kenneth Reitz'
 
 # Attempt to enable urllib3's SNI support, if possible
 try:
@@ -65,7 +65,7 @@ class HTTPAdapter(BaseAdapter):
 
     :param pool_connections: The number of urllib3 connection pools to cache.
     :param pool_maxsize: The maximum number of connections to save in the pool.
-    :param int max_retries: The maximum number of retries each connection
+    :param max_retries: The maximum number of retries each connection
         should attempt. Note, this applies only to failed DNS lookups, socket
         connections and connection timeouts, never to requests where data has
         made it to the server. By default, Requests does not retry failed
@@ -47,6 +47,15 @@ class HTTPBasicAuth(AuthBase):
         self.username = username
         self.password = password
 
+    def __eq__(self, other):
+        return all([
+            self.username == getattr(other, 'username', None),
+            self.password == getattr(other, 'password', None)
+        ])
+
+    def __ne__(self, other):
+        return not self == other
+
     def __call__(self, r):
         r.headers['Authorization'] = _basic_auth_str(self.username, self.password)
         return r
@@ -221,3 +230,12 @@ class HTTPDigestAuth(AuthBase):
         self._thread_local.num_401_calls = 1
 
         return r
+
+    def __eq__(self, other):
+        return all([
+            self.username == getattr(other, 'username', None),
+            self.password == getattr(other, 'password', None)
+        ])
+
+    def __ne__(self, other):
+        return not self == other
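The new equality hooks let two auth objects built from the same credentials compare equal, for example when deciding whether a session's auth actually changed. A small sketch based only on the methods added above:

from requests.auth import HTTPBasicAuth, HTTPDigestAuth

a = HTTPBasicAuth('alice', 's3cret')
b = HTTPBasicAuth('alice', 's3cret')
c = HTTPDigestAuth('alice', 'different')

print(a == b)  # True: same username and password
print(a != c)  # True: passwords differ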
@@ -277,6 +277,12 @@ class RequestsCookieJar(cookielib.CookieJar, collections.MutableMapping):
             dictionary[cookie.name] = cookie.value
         return dictionary
 
+    def __contains__(self, name):
+        try:
+            return super(RequestsCookieJar, self).__contains__(name)
+        except CookieConflictError:
+            return True
+
     def __getitem__(self, name):
         """Dict-like __getitem__() for compatibility with client code. Throws
         exception if there are more than one cookie with name. In that case,
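The added __contains__ makes membership tests safe even when two cookies share a name across domains, a case where dict-style lookup raises CookieConflictError. A brief sketch; the names and domains are illustrative:

from requests.cookies import RequestsCookieJar

jar = RequestsCookieJar()
jar.set('token', 'abc', domain='one.example.com', path='/')
jar.set('token', 'xyz', domain='two.example.com', path='/')

# jar['token'] would raise CookieConflictError here, but membership is safe:
print('token' in jar)  # True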
@@ -110,7 +110,7 @@ class SessionRedirectMixin(object):
             resp.raw.read(decode_content=False)
 
             if i >= self.max_redirects:
-                raise TooManyRedirects('Exceeded %s redirects.' % self.max_redirects)
+                raise TooManyRedirects('Exceeded %s redirects.' % self.max_redirects, response=resp)
 
             # Release the connection back into the pool.
             resp.close()
@@ -553,19 +553,21 @@ class Session(SessionRedirectMixin):
         if not isinstance(request, PreparedRequest):
             raise ValueError('You can only send PreparedRequests.')
 
-        checked_urls = set()
-        while request.url in self.redirect_cache:
-            checked_urls.add(request.url)
-            new_url = self.redirect_cache.get(request.url)
-            if new_url in checked_urls:
-                break
-            request.url = new_url
-
         # Set up variables needed for resolve_redirects and dispatching of hooks
         allow_redirects = kwargs.pop('allow_redirects', True)
         stream = kwargs.get('stream')
         hooks = request.hooks
 
+        # Resolve URL in redirect cache, if available.
+        if allow_redirects:
+            checked_urls = set()
+            while request.url in self.redirect_cache:
+                checked_urls.add(request.url)
+                new_url = self.redirect_cache.get(request.url)
+                if new_url in checked_urls:
+                    break
+                request.url = new_url
+
         # Get the appropriate adapter to use
         adapter = self.get_adapter(url=request.url)
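Moving the redirect-cache lookup under allow_redirects means a request sent with allow_redirects=False is no longer silently rewritten to a previously cached permanent-redirect target. A sketch of the behavior; the URL is hypothetical and assumes a server that answers with a 301:

import requests

session = requests.Session()

# A 301/308 response populates session.redirect_cache for this URL.
session.get('http://example.test/old-path')  # hypothetical URL

# Previously the cached target replaced request.url even with redirects
# disabled; now the request is sent to the original URL as written.
resp = session.get('http://example.test/old-path', allow_redirects=False)
print(resp.status_code, resp.headers.get('Location'))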
@@ -44,9 +44,9 @@ class Git(BaseProject):
                 with open(head, 'r', encoding=sys.getfilesystemencoding()) as fh:
                     return u(fh.readline().strip().rsplit('/', 1)[-1])
             except:
-                log.traceback()
+                log.traceback('warn')
         except IOError:  # pragma: nocover
-            log.traceback()
+            log.traceback('warn')
         return None
 
     def _project_base(self):
@@ -42,9 +42,9 @@ class Mercurial(BaseProject):
                 with open(branch_file, 'r', encoding=sys.getfilesystemencoding()) as fh:
                     return u(fh.readline().strip().rsplit('/', 1)[-1])
             except:
-                log.traceback()
+                log.traceback('warn')
         except IOError:  # pragma: nocover
-            log.traceback()
+            log.traceback('warn')
         return u('default')
 
     def _find_hg_config_dir(self, path):
@@ -41,9 +41,9 @@ class WakaTimeProjectFile(BaseProject):
                         self._project_name = u(fh.readline().strip())
                         self._project_branch = u(fh.readline().strip())
                 except:
-                    log.traceback()
+                    log.traceback('warn')
             except IOError:  # pragma: nocover
-                log.traceback()
+                log.traceback('warn')
 
             return True
         return False
@@ -57,7 +57,7 @@ class SessionCache(object):
             conn.commit()
             conn.close()
         except:  # pragma: nocover
-            log.traceback()
+            log.traceback('debug')
 
 
     def get(self):
@@ -72,7 +72,7 @@ class SessionCache(object):
         try:
             conn, c = self.connect()
         except:
-            log.traceback()
+            log.traceback('debug')
             return requests.session()
 
         session = None
|
@ -83,12 +83,12 @@ class SessionCache(object):
|
||||||
if row is not None:
|
if row is not None:
|
||||||
session = pickle.loads(row[0])
|
session = pickle.loads(row[0])
|
||||||
except: # pragma: nocover
|
except: # pragma: nocover
|
||||||
log.traceback()
|
log.traceback('debug')
|
||||||
|
|
||||||
try:
|
try:
|
||||||
conn.close()
|
conn.close()
|
||||||
except: # pragma: nocover
|
except: # pragma: nocover
|
||||||
log.traceback()
|
log.traceback('debug')
|
||||||
|
|
||||||
return session if session is not None else requests.session()
|
return session if session is not None else requests.session()
|
||||||
|
|
||||||
|
@@ -105,4 +105,4 @@ class SessionCache(object):
             conn.commit()
             conn.close()
         except:
-            log.traceback()
+            log.traceback('debug')
@@ -191,5 +191,5 @@ def get_file_contents(file_name):
         with open(file_name, 'r', encoding=sys.getfilesystemencoding()) as fh:
             text = fh.read(512000)
     except:
-        log.traceback()
+        log.traceback('debug')
     return text