2020-01-14 09:49:30 +00:00
|
|
|
from __future__ import unicode_literals
|
|
|
|
import binascii
|
2017-11-26 22:22:48 +00:00
|
|
|
from datetime import datetime
|
|
|
|
import operator
|
|
|
|
import json
|
|
|
|
import logging
|
|
|
|
import requests
|
2020-04-13 09:43:01 +00:00
|
|
|
import six
|
2017-11-26 22:22:48 +00:00
|
|
|
|
|
|
|
from .. import exceptions
|
|
|
|
from ..account import Account
|
2018-01-11 22:17:34 +00:00
|
|
|
from ..address import address, Address, SubAddress
|
2020-01-22 10:37:03 +00:00
|
|
|
from ..block import Block
|
2020-01-22 03:45:42 +00:00
|
|
|
from ..const import NET_MAIN, NET_TEST, NET_STAGE
|
2018-01-06 22:12:42 +00:00
|
|
|
from ..numbers import from_atomic, to_atomic, PaymentID
|
2018-02-19 22:06:16 +00:00
|
|
|
from ..seed import Seed
|
2018-01-25 07:50:09 +00:00
|
|
|
from ..transaction import Transaction, IncomingPayment, OutgoingPayment
|
2017-11-26 22:22:48 +00:00
|
|
|
|
|
|
|
# Module-level logger used by both RPC backends for request/response debugging.
_log = logging.getLogger(__name__)
|
|
|
|
|
|
|
|
|
2018-01-14 22:51:18 +00:00
|
|
|
class JSONRPCDaemon(object):
    """
    JSON RPC backend for Monero daemon

    :param protocol: `http` or `https`
    :param host: host name or IP
    :param port: port number
    :param path: path for JSON RPC requests (should not be changed)
    :param timeout: request timeout
    :param verify_ssl_certs: verify SSL certificates when connecting
    :param proxy_url: a proxy to use
    """

    # Cached network type (NET_MAIN / NET_TEST / NET_STAGE); resolved lazily
    # from the daemon's get_info reply.
    _net = None

    def __init__(self, protocol='http', host='127.0.0.1', port=34568, path='/json_rpc',
            user='', password='', timeout=30, verify_ssl_certs=True, proxy_url=None):
        self.url = '{protocol}://{host}:{port}'.format(
            protocol=protocol,
            host=host,
            port=port)
        _log.debug("JSONRPC daemon backend URL: {url}".format(url=self.url))
        self.user = user
        self.password = password
        self.timeout = timeout
        self.verify_ssl_certs = verify_ssl_certs
        self.proxies = {protocol: proxy_url}

    def _set_net(self, info):
        """Cache the network type from a get_info result."""
        if info['mainnet']:
            self._net = NET_MAIN
        if info['testnet']:
            self._net = NET_TEST
        if info['stagenet']:
            self._net = NET_STAGE

    def info(self):
        """Return the raw `get_info` result, caching the network type on the way."""
        info = self.raw_jsonrpc_request('get_info')
        self._set_net(info)
        return info

    def net(self):
        """Return the network type, querying the daemon once if not yet known."""
        if self._net:
            return self._net
        self.info()
        return self._net

    def send_transaction(self, blob, relay=True):
        """Broadcast a raw transaction blob.

        :param blob: raw transaction bytes
        :param relay: when False, the daemon will accept but not relay the tx
        :raises exceptions.TransactionBroadcastError: if the daemon rejects it
        """
        res = self.raw_request('/sendrawtransaction', {
            'tx_as_hex': six.ensure_text(binascii.hexlify(blob)),
            'do_not_relay': not relay})
        if res['status'] == 'OK':
            return res
        raise exceptions.TransactionBroadcastError(
            "{status}: {reason}".format(**res),
            details=res)

    def mempool(self):
        """Return the list of Transaction objects currently in the mempool."""
        res = self.raw_request('/get_transaction_pool', {})
        txs = []
        for tx in res.get('transactions', []):
            txs.append(Transaction(
                hash=tx['id_hash'],
                fee=from_atomic(tx['fee']),
                timestamp=datetime.fromtimestamp(tx['receive_time']),
                blob=binascii.unhexlify(tx['tx_blob']),
                json=json.loads(tx['tx_json']),
                confirmations=0))
        return txs

    def headers(self, start_height, end_height=None):
        """Return raw block headers for the inclusive range [start_height, end_height].

        When `end_height` is not given, only the header at `start_height`
        is returned.
        """
        end_height = end_height or start_height
        res = self.raw_jsonrpc_request('get_block_headers_range', {
            'start_height': start_height,
            'end_height': end_height})
        if res['status'] == 'OK':
            return res['headers']
        raise exceptions.BackendException(res['status'])

    def block(self, bhash=None, height=None):
        """Return a Block looked up either by hash or by height.

        :param bhash: block hash (hex string)
        :param height: block height
        :raises exceptions.BackendException: if the daemon reports an error
        """
        data = {}
        if bhash:
            data['hash'] = bhash
        # Compare against None explicitly so the genesis block (height 0,
        # which is falsy) can still be fetched by height.
        if height is not None:
            data['height'] = height
        res = self.raw_jsonrpc_request('get_block', data)
        if res['status'] == 'OK':
            bhdr = res['block_header']
            sub_json = json.loads(res['json'])
            data = {
                'blob': res['blob'],
                'hash': bhdr['hash'],
                'height': bhdr['height'],
                'timestamp': datetime.fromtimestamp(bhdr['timestamp']),
                'version': (bhdr['major_version'], bhdr['minor_version']),
                'difficulty': bhdr['difficulty'],
                'nonce': bhdr['nonce'],
                'orphan': bhdr['orphan_status'],
                'prev_hash': bhdr['prev_hash'],
                'reward': from_atomic(bhdr['reward']),
                # The miner (coinbase) tx is listed separately from the
                # regular tx hashes in the block's JSON representation.
                'transactions': self.transactions(
                    [bhdr['miner_tx_hash']] + sub_json['tx_hashes']),
            }
            return Block(**data)
        raise exceptions.BackendException(res['status'])

    def transactions(self, hashes):
        """Return Transaction objects for the given list of tx hashes.

        Hashes unknown to the daemon are silently omitted from the result.
        """
        res = self.raw_request('/get_transactions', {
            'txs_hashes': hashes,
            'decode_as_json': True})
        if res['status'] != 'OK':
            raise exceptions.BackendException(res['status'])
        txs = []
        for tx in res.get('txs', []):
            as_json = json.loads(tx['as_json'])
            # Pre-RingCT transactions carry no rct_signatures, hence no fee here.
            fee = as_json.get('rct_signatures', {}).get('txnFee')
            txs.append(Transaction(
                hash=tx['tx_hash'],
                fee=from_atomic(fee) if fee else None,
                height=None if tx['in_pool'] else tx['block_height'],
                timestamp=datetime.fromtimestamp(
                    tx['block_timestamp']) if 'block_timestamp' in tx else None,
                blob=binascii.unhexlify(tx['as_hex']),
                json=as_json))
        return txs

    def raw_request(self, path, data):
        """POST `data` as JSON to a plain (non-JSON-RPC) daemon endpoint.

        :param path: endpoint path, e.g. '/get_transactions'
        :param data: dict payload, serialized to JSON
        :raises RPCError: on a non-200 HTTP status
        """
        hdr = {'Content-Type': 'application/json'}
        _log.debug(u"Request: {path}\nData: {data}".format(
            path=path,
            data=json.dumps(data, indent=2, sort_keys=True)))
        auth = requests.auth.HTTPDigestAuth(self.user, self.password)
        rsp = requests.post(
            self.url + path, headers=hdr, data=json.dumps(data), auth=auth,
            timeout=self.timeout, verify=self.verify_ssl_certs, proxies=self.proxies)
        if rsp.status_code != 200:
            raise RPCError("Invalid HTTP status {code} for path {path}.".format(
                code=rsp.status_code,
                path=path))
        result = rsp.json()
        _ppresult = json.dumps(result, indent=2, sort_keys=True)
        _log.debug(u"Result:\n{result}".format(result=_ppresult))
        return result

    def raw_jsonrpc_request(self, method, params=None):
        """Perform a JSON RPC 2.0 call against the daemon's /json_rpc endpoint.

        :param method: RPC method name
        :param params: optional dict of parameters
        :raises Unauthorized: on HTTP 401
        :raises RPCError: on other HTTP errors or a JSON RPC error reply
        """
        hdr = {'Content-Type': 'application/json'}
        data = {'jsonrpc': '2.0', 'id': 0, 'method': method, 'params': params or {}}
        _log.debug(u"Method: {method}\nParams:\n{params}".format(
            method=method,
            params=json.dumps(params, indent=2, sort_keys=True)))
        auth = requests.auth.HTTPDigestAuth(self.user, self.password)
        rsp = requests.post(
            self.url + '/json_rpc', headers=hdr, data=json.dumps(data), auth=auth,
            timeout=self.timeout, verify=self.verify_ssl_certs, proxies=self.proxies)

        if rsp.status_code == 401:
            raise Unauthorized("401 Unauthorized. Invalid RPC user name or password.")
        elif rsp.status_code != 200:
            raise RPCError("Invalid HTTP status {code} for method {method}.".format(
                code=rsp.status_code,
                method=method))
        result = rsp.json()
        _ppresult = json.dumps(result, indent=2, sort_keys=True)
        _log.debug(u"Result:\n{result}".format(result=_ppresult))

        if 'error' in result:
            err = result['error']
            _log.error(u"JSON RPC error:\n{result}".format(result=_ppresult))
            raise RPCError(
                "Method '{method}' failed with RPC Error of unknown code {code}, "
                "message: {message}".format(method=method, data=data, result=result, **err))
        return result['result']
|
|
|
|
|
|
|
|
2017-12-26 21:02:17 +00:00
|
|
|
class JSONRPCWallet(object):
    """
    JSON RPC backend for Monero wallet (``monero-wallet-rpc``)

    :param protocol: `http` or `https`
    :param host: host name or IP
    :param port: port number
    :param path: path for JSON RPC requests (should not be changed)
    :param user: username to authenticate with over RPC
    :param password: password to authenticate with over RPC
    :param timeout: request timeout
    :param verify_ssl_certs: verify ssl certs for request
    :param proxy_url: a proxy to use
    """

    # Master (account #0 base) address; populated by accounts().
    _master_address = None

    def __init__(self, protocol='http', host='127.0.0.1', port=8888, path='/json_rpc',
            user='', password='', timeout=30, verify_ssl_certs=True, proxy_url=None):
        self.url = '{protocol}://{host}:{port}/json_rpc'.format(
            protocol=protocol,
            host=host,
            port=port)
        _log.debug("JSONRPC wallet backend URL: {url}".format(url=self.url))
        self.user = user
        self.password = password
        self.timeout = timeout
        self.verify_ssl_certs = verify_ssl_certs
        self.proxies = {protocol: proxy_url}
        _log.debug("JSONRPC wallet backend auth: '{user}'/'{stars}'".format(
            user=user, stars=('*' * len(password)) if password else ''))

    def height(self):
        """Return the wallet's current blockchain height."""
        return self.raw_request('getheight')['height']

    def spend_key(self):
        """Return the secret spend key as a hex string."""
        return self.raw_request('query_key', {'key_type': 'spend_key'})['key']

    def view_key(self):
        """Return the secret view key as a hex string."""
        return self.raw_request('query_key', {'key_type': 'view_key'})['key']

    def seed(self):
        """Return the wallet's mnemonic seed as a Seed object."""
        return Seed(self.raw_request('query_key', {'key_type': 'mnemonic'})['key'])

    def accounts(self):
        """Return the list of Account objects, caching the master address."""
        accounts = []
        _accounts = self.raw_request('get_accounts')
        idx = 0
        self._master_address = Address(_accounts['subaddress_accounts'][0]['base_address'])
        for _acc in _accounts['subaddress_accounts']:
            # The RPC is expected to return accounts in index order.
            assert idx == _acc['account_index']
            accounts.append(Account(self, _acc['account_index'], label=_acc.get('label')))
            idx += 1
        return accounts

    def new_account(self, label=None):
        """Create a new account; return (Account, its base SubAddress)."""
        _account = self.raw_request('create_account', {'label': label})
        # NOTE: the following should re-read label by _account.get('label') but the RPC
        # doesn't return that detail here
        return Account(self, _account['account_index'], label=label), SubAddress(_account['address'])

    def addresses(self, account=0, addr_indices=None):
        """Return addresses of an account, indexed by their subaddress index.

        :param account: account index
        :param addr_indices: optional list of subaddress indices to limit to
        """
        qdata = {'account_index': account}
        if addr_indices:
            qdata['address_index'] = addr_indices
        _addresses = self.raw_request('getaddress', qdata)
        # Pre-size by the highest returned index; gaps stay None when
        # addr_indices restricts the query.
        addresses = [None] * (max(map(operator.itemgetter('address_index'), _addresses['addresses'])) + 1)
        for _addr in _addresses['addresses']:
            addresses[_addr['address_index']] = address(
                _addr['address'],
                label=_addr.get('label', None))
        return addresses

    def new_address(self, account=0, label=None):
        """Create a new subaddress; return (SubAddress, its index)."""
        _address = self.raw_request(
            'create_address', {'account_index': account, 'label': label})
        return SubAddress(_address['address']), _address['address_index']

    def balances(self, account=0):
        """Return (total_balance, unlocked_balance) of an account as Decimals."""
        _balance = self.raw_request('getbalance', {'account_index': account})
        return (from_atomic(_balance['balance']), from_atomic(_balance['unlocked_balance']))

    def transfers_in(self, account, pmtfilter):
        """Return incoming payments of an account matching the filter.

        Chooses the RPC method based on the filter: get_transfer_by_txid
        when tx ids are given, get_bulk_payments for confirmed payments
        filtered by payment id, get_transfers otherwise.
        """
        params = {'account_index': account, 'pending': False}
        method = 'get_transfers'
        # Initialize so the 'pool' lookup below cannot hit an unbound name
        # when every requested txid turns out to be unknown.
        _pmts = {}
        if pmtfilter.tx_ids:
            method = 'get_transfer_by_txid'
        if pmtfilter.unconfirmed:
            params['in'] = pmtfilter.confirmed
            params['out'] = False
            params['pool'] = True
        else:
            if pmtfilter.payment_ids:
                method = 'get_bulk_payments'
                params['payment_ids'] = list(map(str, pmtfilter.payment_ids))
            else:
                params['in'] = pmtfilter.confirmed
                params['out'] = False
                params['pool'] = False
        if method == 'get_transfers':
            if pmtfilter.min_height:
                # NOTE: the API uses (min, max] range which is confusing
                params['min_height'] = pmtfilter.min_height - 1
                params['filter_by_height'] = True
            if pmtfilter.max_height:
                params['max_height'] = pmtfilter.max_height
                params['filter_by_height'] = True
            _pmts = self.raw_request(method, params)
            pmts = _pmts.get('in', [])
        elif method == 'get_transfer_by_txid':
            pmts = []
            for txid in pmtfilter.tx_ids:
                params['txid'] = txid
                try:
                    # Unknown txids are expected; don't log them as errors.
                    _pmts = self.raw_request(method, params, squelch_error_logging=True)
                except exceptions.TransactionNotFound:
                    continue
                pmts.extend(_pmts['transfers'])
        else:
            # NOTE: the API uses (min, max] range which is confusing
            params['min_block_height'] = (pmtfilter.min_height or 1) - 1
            _pmts = self.raw_request(method, params)
            pmts = _pmts.get('payments', [])
        if pmtfilter.unconfirmed:
            pmts.extend(_pmts.get('pool', []))
        return list(pmtfilter.filter(map(self._inpayment, pmts)))

    def transfers_out(self, account, pmtfilter):
        """Return outgoing payments of an account matching the filter."""
        if pmtfilter.tx_ids:
            pmts = []
            for txid in pmtfilter.tx_ids:
                try:
                    # Unknown txids are expected; don't log them as errors.
                    _pmts = self.raw_request(
                        'get_transfer_by_txid',
                        {'account_index': account, 'txid': txid},
                        squelch_error_logging=True)
                except exceptions.TransactionNotFound:
                    continue
                pmts.extend(_pmts['transfers'])
        else:
            _pmts = self.raw_request('get_transfers', {
                'account_index': account,
                'in': False,
                'out': pmtfilter.confirmed,
                'pool': False,
                'pending': pmtfilter.unconfirmed})
            pmts = _pmts.get('out', [])
            if pmtfilter.unconfirmed:
                pmts.extend(_pmts.get('pending', []))
        return list(pmtfilter.filter(map(self._outpayment, pmts)))

    def _paymentdict(self, data):
        """Convert one raw RPC transfer/payment entry into Payment kwargs."""
        pid = data.get('payment_id', None)
        laddr = data.get('address', None)
        if laddr:
            laddr = address(laddr)
        result = {
            'payment_id': None if pid is None else PaymentID(pid),
            'amount': from_atomic(data['amount']),
            'timestamp': datetime.fromtimestamp(data['timestamp']) if 'timestamp' in data else None,
            'note': data.get('note', None),
            'transaction': self._tx(data),
            'local_address': laddr,
        }
        if 'destinations' in data:
            result['destinations'] = [
                (address(x['address']), from_atomic(x['amount']))
                for x in data.get('destinations')
            ]
        return result

    def _inpayment(self, data):
        """Build an IncomingPayment from a raw RPC entry."""
        return IncomingPayment(**self._paymentdict(data))

    def _outpayment(self, data):
        """Build an OutgoingPayment from a raw RPC entry."""
        return OutgoingPayment(**self._paymentdict(data))

    def _tx(self, data):
        """Build a Transaction from a raw RPC entry, tolerating missing keys."""
        return Transaction(**{
            'hash': data.get('txid', data.get('tx_hash')),
            'fee': from_atomic(data['fee']) if 'fee' in data else None,
            'key': data.get('key'),
            # unconfirmed txs report height 0; normalize that to None
            'height': data.get('height', data.get('block_height')) or None,
            'timestamp': datetime.fromtimestamp(data['timestamp']) if 'timestamp' in data else None,
            'blob': binascii.unhexlify(data.get('blob', '')),
            'confirmations': data.get('confirmations', None)
        })

    def export_outputs(self):
        """Return the wallet's outputs as a hex blob (for cold signing)."""
        return self.raw_request('export_outputs')['outputs_data_hex']

    def import_outputs(self, outputs_hex):
        """Import outputs from a hex blob; return the number imported."""
        return self.raw_request(
            'import_outputs',
            {'outputs_data_hex': outputs_hex})['num_imported']

    def export_key_images(self):
        """Return the wallet's signed key images."""
        return self.raw_request('export_key_images')['signed_key_images']

    def import_key_images(self, key_images):
        """Import signed key images; return (height, spent, unspent)."""
        _data = self.raw_request(
            'import_key_images',
            {'signed_key_images': key_images})
        return (_data['height'], from_atomic(_data['spent']), from_atomic(_data['unspent']))

    def transfer(self, destinations, priority,
            payment_id=None, unlock_time=0, account=0,
            relay=True):
        """Send funds via transfer_split; return the resulting Transactions.

        :param destinations: list of (address, amount) pairs
        :param priority: transaction priority
        :param payment_id: optional payment id
        :param unlock_time: number of blocks the outputs stay locked
        :param account: source account index
        :param relay: when False, build but do not relay the transaction(s)
        """
        data = {
            'account_index': account,
            'destinations': list(map(
                lambda dst: {'address': str(address(dst[0])), 'amount': to_atomic(dst[1])},
                destinations)),
            'priority': priority,
            # pass the caller's value through; this used to be hard-coded to 0
            'unlock_time': unlock_time,
            'get_tx_keys': True,
            'get_tx_hex': True,
            'new_algorithm': True,
            'do_not_relay': not relay,
        }
        if payment_id is not None:
            data['payment_id'] = str(PaymentID(payment_id))
        _transfers = self.raw_request('transfer_split', data)
        # Re-group the parallel per-tx result lists into one dict per tx.
        _pertx = [dict(_tx) for _tx in map(
            lambda vs: zip(('txid', 'amount', 'fee', 'key', 'blob', 'payment_id'), vs),
            zip(*[_transfers[k] for k in (
                'tx_hash_list', 'amount_list', 'fee_list', 'tx_key_list', 'tx_blob_list')]))]
        for d in _pertx:
            d['payment_id'] = payment_id
        return [self._tx(data) for data in _pertx]

    def sweep_all(self, destination, priority, payment_id=None, subaddr_indices=None,
            unlock_time=0, account=0, relay=True):
        """Sweep all unlocked funds of an account to one destination.

        Returns a list of (Transaction, amount) pairs.

        :param destination: target address
        :param priority: transaction priority
        :param payment_id: optional payment id
        :param subaddr_indices: subaddresses to sweep; defaults to all with
            a positive unlocked balance
        :param unlock_time: number of blocks the outputs stay locked
        :param account: source account index
        :param relay: when False, build but do not relay the transaction(s)
        """
        if not subaddr_indices:
            # retrieve indices of all subaddresses with positive unlocked balance
            bals = self.raw_request('get_balance', {'account_index': account})
            subaddr_indices = []
            for subaddr in bals['per_subaddress']:
                if subaddr.get('unlocked_balance', 0):
                    subaddr_indices.append(subaddr['address_index'])
        data = {
            'account_index': account,
            'address': str(address(destination)),
            'subaddr_indices': list(subaddr_indices),
            'priority': priority,
            # pass the caller's value through; this used to be hard-coded to 0
            'unlock_time': unlock_time,
            'get_tx_keys': True,
            'get_tx_hex': True,
            'do_not_relay': not relay,
        }
        if payment_id is not None:
            data['payment_id'] = str(PaymentID(payment_id))
        _transfers = self.raw_request('sweep_all', data)
        # Re-group the parallel per-tx result lists into one dict per tx.
        _pertx = [dict(_tx) for _tx in map(
            lambda vs: zip(('txid', 'amount', 'fee', 'key', 'blob', 'payment_id'), vs),
            zip(*[_transfers[k] for k in (
                'tx_hash_list', 'amount_list', 'fee_list', 'tx_key_list', 'tx_blob_list')]))]
        for d in _pertx:
            d['payment_id'] = payment_id
        return list(zip(
            [self._tx(data) for data in _pertx],
            map(from_atomic, _transfers['amount_list'])))

    def raw_request(self, method, params=None, squelch_error_logging=False):
        """Perform a JSON RPC 2.0 call against the wallet RPC endpoint.

        :param method: RPC method name
        :param params: optional dict of parameters
        :param squelch_error_logging: suppress error-level logging of RPC
            errors (used where errors are expected, e.g. unknown txids)
        :raises Unauthorized: on HTTP 401
        :raises RPCError: on other HTTP errors or unmapped RPC error codes
        """
        hdr = {'Content-Type': 'application/json'}
        data = {'jsonrpc': '2.0', 'id': 0, 'method': method, 'params': params or {}}
        _log.debug(u"Method: {method}\nParams:\n{params}".format(
            method=method,
            params=json.dumps(params, indent=2, sort_keys=True)))
        auth = requests.auth.HTTPDigestAuth(self.user, self.password)
        rsp = requests.post(
            self.url, headers=hdr, data=json.dumps(data), auth=auth,
            timeout=self.timeout, verify=self.verify_ssl_certs, proxies=self.proxies)

        if rsp.status_code == 401:
            raise Unauthorized("401 Unauthorized. Invalid RPC user name or password.")
        elif rsp.status_code != 200:
            raise RPCError("Invalid HTTP status {code} for method {method}.".format(
                code=rsp.status_code,
                method=method))
        result = rsp.json()
        _ppresult = json.dumps(result, indent=2, sort_keys=True)
        _log.debug(u"Result:\n{result}".format(result=_ppresult))

        if 'error' in result:
            err = result['error']
            if not squelch_error_logging:
                _log.error(u"JSON RPC error:\n{result}".format(result=_ppresult))
            # Translate known wallet RPC error codes into typed exceptions.
            if err['code'] in _err2exc:
                raise _err2exc[err['code']](err['message'])
            else:
                raise RPCError(
                    "Method '{method}' failed with RPC Error of unknown code {code}, "
                    "message: {message}".format(method=method, data=data, result=result, **err))
        return result['result']
|
|
|
|
|
|
|
|
|
2017-11-29 03:38:29 +00:00
|
|
|
class RPCError(exceptions.BackendException):
    """Generic error reported by a JSON RPC endpoint or an unexpected HTTP status."""
    pass
|
2017-11-26 22:22:48 +00:00
|
|
|
|
|
|
|
|
|
|
|
class Unauthorized(RPCError):
    """Raised when the RPC endpoint replies with HTTP 401 (bad RPC user/password)."""
    pass
|
|
|
|
|
|
|
|
|
|
|
|
class MethodNotFound(RPCError):
    """Raised for JSON RPC error -32601: the requested method does not exist."""
    pass
|
|
|
|
|
|
|
|
|
|
|
|
# Map of JSON RPC error codes returned by the wallet RPC to the typed
# exception classes raised by JSONRPCWallet.raw_request(). Codes not
# listed here fall through to a generic RPCError.
_err2exc = {
    -2: exceptions.WrongAddress,
    -4: exceptions.GenericTransferError,
    -5: exceptions.WrongPaymentId,
    -8: exceptions.TransactionNotFound,
    -9: exceptions.SignatureCheckFailed,
    -14: exceptions.AccountIndexOutOfBound,
    -15: exceptions.AddressIndexOutOfBound,
    -16: exceptions.TransactionNotPossible,
    -17: exceptions.NotEnoughMoney,
    -20: exceptions.AmountIsZero,
    -29: exceptions.WalletIsWatchOnly,
    -37: exceptions.NotEnoughUnlockedMoney,
    -38: exceptions.NoDaemonConnection,
    -43: exceptions.WalletIsNotDeterministic,  # https://github.com/monero-project/monero/pull/4653
    -32601: MethodNotFound,
}
|