Mirror of https://git.wownero.com/wownero/RandomWOW.git (synced 2024-08-15 00:23:14 +00:00)
Avoid argon2 symbol clashes with libsodium
parent ac22aed121
commit ab6076ae58
6 changed files with 54 additions and 54 deletions
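Background: this code base carries its own copy of the Argon2 reference implementation, and libsodium bundles an Argon2 implementation as well. When both end up in the same binary, the identically named external helpers (fill_segment, blake2b_long, validate_inputs, and the others renamed below) can clash: a static link may fail with duplicate-symbol errors, and a dynamic link may silently bind callers to whichever definition the linker resolves first. The commit avoids this by giving the local copies an rxa2_ prefix.

The same namespacing idea can also be expressed as preprocessor renames collected in one header instead of editing every definition and call site. The sketch below only illustrates that alternative; the header name and symbol list are assumptions, and this commit renames the functions directly in the sources instead.

/* rxa2_rename.h -- hypothetical sketch, not part of this commit.
 * If included before the bundled Argon2 sources, it would rename the
 * clashing external symbols at preprocessing time, so definitions and
 * call sites pick up the rxa2_ prefix without touching each file. */
#ifndef RXA2_RENAME_H
#define RXA2_RENAME_H

#define fill_segment          rxa2_fill_segment
#define fill_memory_blocks    rxa2_fill_memory_blocks
#define validate_inputs       rxa2_validate_inputs
#define clear_internal_memory rxa2_clear_internal_memory
#define secure_wipe_memory    rxa2_secure_wipe_memory
#define blake2b_long          rxa2_blake2b_long
/* ...one #define per external Argon2 symbol... */

#endif /* RXA2_RENAME_H */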
src/argon2_core.c

@@ -61,13 +61,13 @@ along with RandomX. If not, see <http://www.gnu.org/licenses/>.
 #endif

 /***************Instance and Position constructors**********/

-void init_block_value(block *b, uint8_t in) { memset(b->v, in, sizeof(b->v)); }
+void rxa2_init_block_value(block *b, uint8_t in) { memset(b->v, in, sizeof(b->v)); }

-void copy_block(block *dst, const block *src) {
+void rxa2_copy_block(block *dst, const block *src) {
     memcpy(dst->v, src->v, sizeof(uint64_t) * ARGON2_QWORDS_IN_BLOCK);
 }

-void xor_block(block *dst, const block *src) {
+void rxa2_xor_block(block *dst, const block *src) {
     int i;
     for (i = 0; i < ARGON2_QWORDS_IN_BLOCK; ++i) {
         dst->v[i] ^= src->v[i];

@@ -90,7 +90,7 @@ static void store_block(void *output, const block *src) {

 /***************Memory functions*****************/

-int allocate_memory(const argon2_context *context, uint8_t **memory,
+int rxa2_allocate_memory(const argon2_context *context, uint8_t **memory,
     size_t num, size_t size) {
     size_t memory_size = num * size;
     if (memory == NULL) {

@@ -117,10 +117,10 @@ int allocate_memory(const argon2_context *context, uint8_t **memory,
     return ARGON2_OK;
 }

-void free_memory(const argon2_context *context, uint8_t *memory,
+void rxa2_free_memory(const argon2_context *context, uint8_t *memory,
     size_t num, size_t size) {
     size_t memory_size = num * size;
-    clear_internal_memory(memory, memory_size);
+    rxa2_clear_internal_memory(memory, memory_size);
     if (context->free_cbk) {
         (context->free_cbk)(memory, memory_size);
     }

@@ -129,7 +129,7 @@ void free_memory(const argon2_context *context, uint8_t *memory,
     }
 }

-void NOT_OPTIMIZED secure_wipe_memory(void *v, size_t n) {
+void NOT_OPTIMIZED rxa2_secure_wipe_memory(void *v, size_t n) {
 #if defined(_MSC_VER) && VC_GE_2005(_MSC_VER)
     SecureZeroMemory(v, n);
 #elif defined memset_s

@@ -144,13 +144,13 @@ void NOT_OPTIMIZED secure_wipe_memory(void *v, size_t n) {

 /* Memory clear flag defaults to true. */
 #define FLAG_clear_internal_memory 0
-void clear_internal_memory(void *v, size_t n) {
+void rxa2_clear_internal_memory(void *v, size_t n) {
     if (FLAG_clear_internal_memory && v) {
-        secure_wipe_memory(v, n);
+        rxa2_secure_wipe_memory(v, n);
     }
 }

-uint32_t index_alpha(const argon2_instance_t *instance,
+uint32_t rxa2_index_alpha(const argon2_instance_t *instance,
     const argon2_position_t *position, uint32_t pseudo_rand,
     int same_lane) {
     /*

@@ -232,7 +232,7 @@ static int fill_memory_blocks_st(argon2_instance_t *instance) {
     for (s = 0; s < ARGON2_SYNC_POINTS; ++s) {
         for (l = 0; l < instance->lanes; ++l) {
             argon2_position_t position = { r, l, (uint8_t)s, 0 };
-            fill_segment(instance, position);
+            rxa2_fill_segment(instance, position);
         }
     }
 #ifdef GENKAT

@@ -242,14 +242,14 @@ static int fill_memory_blocks_st(argon2_instance_t *instance) {
     return ARGON2_OK;
 }

-int fill_memory_blocks(argon2_instance_t *instance) {
+int rxa2_fill_memory_blocks(argon2_instance_t *instance) {
     if (instance == NULL || instance->lanes == 0) {
         return ARGON2_INCORRECT_PARAMETER;
     }
     return fill_memory_blocks_st(instance);
 }

-int validate_inputs(const argon2_context *context) {
+int rxa2_validate_inputs(const argon2_context *context) {
     if (NULL == context) {
         return ARGON2_INCORRECT_PARAMETER;
     }

@@ -378,7 +378,7 @@ int validate_inputs(const argon2_context *context) {
     return ARGON2_OK;
 }

-void fill_first_blocks(uint8_t *blockhash, const argon2_instance_t *instance) {
+void rxa2_fill_first_blocks(uint8_t *blockhash, const argon2_instance_t *instance) {
     uint32_t l;
     /* Make the first and second block in each lane as G(H0||0||i) or
        G(H0||1||i) */

@@ -387,21 +387,21 @@ void fill_first_blocks(uint8_t *blockhash, const argon2_instance_t *instance) {

         store32(blockhash + ARGON2_PREHASH_DIGEST_LENGTH, 0);
         store32(blockhash + ARGON2_PREHASH_DIGEST_LENGTH + 4, l);
-        blake2b_long(blockhash_bytes, ARGON2_BLOCK_SIZE, blockhash,
+        rxa2_blake2b_long(blockhash_bytes, ARGON2_BLOCK_SIZE, blockhash,
             ARGON2_PREHASH_SEED_LENGTH);
         load_block(&instance->memory[l * instance->lane_length + 0],
             blockhash_bytes);

         store32(blockhash + ARGON2_PREHASH_DIGEST_LENGTH, 1);
-        blake2b_long(blockhash_bytes, ARGON2_BLOCK_SIZE, blockhash,
+        rxa2_blake2b_long(blockhash_bytes, ARGON2_BLOCK_SIZE, blockhash,
             ARGON2_PREHASH_SEED_LENGTH);
         load_block(&instance->memory[l * instance->lane_length + 1],
             blockhash_bytes);
     }
-    clear_internal_memory(blockhash_bytes, ARGON2_BLOCK_SIZE);
+    rxa2_clear_internal_memory(blockhash_bytes, ARGON2_BLOCK_SIZE);
 }

-void initial_hash(uint8_t *blockhash, argon2_context *context, argon2_type type) {
+void rxa2_initial_hash(uint8_t *blockhash, argon2_context *context, argon2_type type) {
     blake2b_state BlakeHash;
     uint8_t value[sizeof(uint32_t)];

@@ -437,7 +437,7 @@ void initial_hash(uint8_t *blockhash, argon2_context *context, argon2_type type)
             context->pwdlen);

         if (context->flags & ARGON2_FLAG_CLEAR_PASSWORD) {
-            secure_wipe_memory(context->pwd, context->pwdlen);
+            rxa2_secure_wipe_memory(context->pwd, context->pwdlen);
             context->pwdlen = 0;
         }
     }

@@ -457,7 +457,7 @@ void initial_hash(uint8_t *blockhash, argon2_context *context, argon2_type type)
             context->secretlen);

         if (context->flags & ARGON2_FLAG_CLEAR_SECRET) {
-            secure_wipe_memory(context->secret, context->secretlen);
+            rxa2_secure_wipe_memory(context->secret, context->secretlen);
            context->secretlen = 0;
         }
     }

@@ -473,7 +473,7 @@ void initial_hash(uint8_t *blockhash, argon2_context *context, argon2_type type)
     blake2b_final(&BlakeHash, blockhash, ARGON2_PREHASH_DIGEST_LENGTH);
 }

-int argon_initialize(argon2_instance_t *instance, argon2_context *context) {
+int rxa2_argon_initialize(argon2_instance_t *instance, argon2_context *context) {
     uint8_t blockhash[ARGON2_PREHASH_SEED_LENGTH];
     int result = ARGON2_OK;

@@ -491,17 +491,17 @@ int argon_initialize(argon2_instance_t *instance, argon2_context *context) {
     /* H_0 + 8 extra bytes to produce the first blocks */
     /* uint8_t blockhash[ARGON2_PREHASH_SEED_LENGTH]; */
     /* Hashing all inputs */
-    initial_hash(blockhash, context, instance->type);
+    rxa2_initial_hash(blockhash, context, instance->type);
     /* Zeroing 8 extra bytes */
-    clear_internal_memory(blockhash + ARGON2_PREHASH_DIGEST_LENGTH,
+    rxa2_clear_internal_memory(blockhash + ARGON2_PREHASH_DIGEST_LENGTH,
         ARGON2_PREHASH_SEED_LENGTH -
         ARGON2_PREHASH_DIGEST_LENGTH);

     /* 3. Creating first blocks, we always have at least two blocks in a slice
     */
-    fill_first_blocks(blockhash, instance);
+    rxa2_fill_first_blocks(blockhash, instance);
     /* Clearing the hash */
-    clear_internal_memory(blockhash, ARGON2_PREHASH_SEED_LENGTH);
+    rxa2_clear_internal_memory(blockhash, ARGON2_PREHASH_SEED_LENGTH);

     return ARGON2_OK;
 }
src/argon2_core.h

@@ -67,13 +67,13 @@ typedef struct block_ { uint64_t v[ARGON2_QWORDS_IN_BLOCK]; } block;
 /*****************Functions that work with the block******************/

 /* Initialize each byte of the block with @in */
-void init_block_value(block *b, uint8_t in);
+void rxa2_init_block_value(block *b, uint8_t in);

 /* Copy block @src to block @dst */
-void copy_block(block *dst, const block *src);
+void rxa2_copy_block(block *dst, const block *src);

 /* XOR @src onto @dst bytewise */
-void xor_block(block *dst, const block *src);
+void rxa2_xor_block(block *dst, const block *src);

 /*
  * Argon2 instance: memory pointer, number of passes, amount of memory, type,

@@ -122,7 +122,7 @@ typedef struct Argon2_thread_data {
  * @param num the number of elements to be allocated
  * @return ARGON2_OK if @memory is a valid pointer and memory is allocated
  */
-int allocate_memory(const argon2_context *context, uint8_t **memory,
+int rxa2_allocate_memory(const argon2_context *context, uint8_t **memory,
     size_t num, size_t size);

 /*

@@ -133,7 +133,7 @@ int allocate_memory(const argon2_context *context, uint8_t **memory,
  * @param size the size in bytes for each element to be deallocated
  * @param num the number of elements to be deallocated
  */
-void free_memory(const argon2_context *context, uint8_t *memory,
+void rxa2_free_memory(const argon2_context *context, uint8_t *memory,
     size_t num, size_t size);

 /* Function that securely cleans the memory. This ignores any flags set

@@ -141,14 +141,14 @@ void free_memory(const argon2_context *context, uint8_t *memory,
  * @param mem Pointer to the memory
  * @param s Memory size in bytes
  */
-void secure_wipe_memory(void *v, size_t n);
+void rxa2_secure_wipe_memory(void *v, size_t n);

 /* Function that securely clears the memory if FLAG_clear_internal_memory is
  * set. If the flag isn't set, this function does nothing.
  * @param mem Pointer to the memory
  * @param s Memory size in bytes
  */
-void clear_internal_memory(void *v, size_t n);
+void rxa2_clear_internal_memory(void *v, size_t n);

 /*
  * Computes absolute position of reference block in the lane following a skewed

@@ -160,7 +160,7 @@ void clear_internal_memory(void *v, size_t n);
  * If so we can reference the current segment
  * @pre All pointers must be valid
  */
-uint32_t index_alpha(const argon2_instance_t *instance,
+uint32_t rxa2_index_alpha(const argon2_instance_t *instance,
     const argon2_position_t *position, uint32_t pseudo_rand,
     int same_lane);

@@ -171,7 +171,7 @@ uint32_t index_alpha(const argon2_instance_t *instance,
  * @return ARGON2_OK if everything is all right, otherwise one of error codes
  * (all defined in <argon2.h>
  */
-int validate_inputs(const argon2_context *context);
+int rxa2_validate_inputs(const argon2_context *context);

 /*
  * Hashes all the inputs into @a blockhash[PREHASH_DIGEST_LENGTH], clears

@@ -183,7 +183,7 @@ int validate_inputs(const argon2_context *context);
  * @pre @a blockhash must have at least @a PREHASH_DIGEST_LENGTH bytes
  * allocated
  */
-void initial_hash(uint8_t *blockhash, argon2_context *context,
+void rxa2_initial_hash(uint8_t *blockhash, argon2_context *context,
     argon2_type type);

 /*

@@ -192,7 +192,7 @@ void initial_hash(uint8_t *blockhash, argon2_context *context,
  * @param blockhash Pointer to the pre-hashing digest
  * @pre blockhash must point to @a PREHASH_SEED_LENGTH allocated values
  */
-void fill_first_blocks(uint8_t *blockhash, const argon2_instance_t *instance);
+void rxa2_fill_first_blocks(uint8_t *blockhash, const argon2_instance_t *instance);

 /*
  * Function allocates memory, hashes the inputs with Blake, and creates first

@@ -204,7 +204,7 @@ void fill_first_blocks(uint8_t *blockhash, const argon2_instance_t *instance);
  * @return Zero if successful, -1 if memory failed to allocate. @context->state
  * will be modified if successful.
  */
-int argon_initialize(argon2_instance_t *instance, argon2_context *context);
+int rxa2_argon_initialize(argon2_instance_t *instance, argon2_context *context);

 /*
  * XORing the last block of each lane, hashing it, making the tag. Deallocates

@@ -217,7 +217,7 @@ int argon_initialize(argon2_instance_t *instance, argon2_context *context);
  * @pre if context->free_cbk is not NULL, it should point to a function that
  * deallocates memory
  */
-void finalize(const argon2_context *context, argon2_instance_t *instance);
+void rxa2_finalize(const argon2_context *context, argon2_instance_t *instance);

 /*
  * Function that fills the segment using previous segments also from other

@@ -227,7 +227,7 @@ void finalize(const argon2_context *context, argon2_instance_t *instance);
  * @param position Current position
  * @pre all block pointers must be valid
  */
-void fill_segment(const argon2_instance_t *instance,
+void rxa2_fill_segment(const argon2_instance_t *instance,
     argon2_position_t position);

 /*

@@ -236,7 +236,7 @@ void fill_segment(const argon2_instance_t *instance,
  * @param instance Pointer to the current instance
  * @return ARGON2_OK if successful, @context->state
  */
-int fill_memory_blocks(argon2_instance_t *instance);
+int rxa2_fill_memory_blocks(argon2_instance_t *instance);

 #if defined(__cplusplus)
 }
src/argon2_ref.c

@@ -48,13 +48,13 @@ static void fill_block(const block *prev_block, const block *ref_block,
     block blockR, block_tmp;
     unsigned i;

-    copy_block(&blockR, ref_block);
-    xor_block(&blockR, prev_block);
-    copy_block(&block_tmp, &blockR);
+    rxa2_copy_block(&blockR, ref_block);
+    rxa2_xor_block(&blockR, prev_block);
+    rxa2_copy_block(&block_tmp, &blockR);
     /* Now blockR = ref_block + prev_block and block_tmp = ref_block + prev_block */
     if (with_xor) {
         /* Saving the next block contents for XOR over: */
-        xor_block(&block_tmp, next_block);
+        rxa2_xor_block(&block_tmp, next_block);
         /* Now blockR = ref_block + prev_block and
            block_tmp = ref_block + prev_block + next_block */
     }

@@ -83,8 +83,8 @@ static void fill_block(const block *prev_block, const block *ref_block,
             blockR.v[2 * i + 113]);
     }

-    copy_block(next_block, &block_tmp);
-    xor_block(next_block, &blockR);
+    rxa2_copy_block(next_block, &block_tmp);
+    rxa2_xor_block(next_block, &blockR);
 }

 static void next_addresses(block *address_block, block *input_block,

@@ -94,7 +94,7 @@ static void next_addresses(block *address_block, block *input_block,
     fill_block(zero_block, address_block, address_block, 0);
 }

-void fill_segment(const argon2_instance_t *instance,
+void rxa2_fill_segment(const argon2_instance_t *instance,
     argon2_position_t position) {
     block *ref_block = NULL, *curr_block = NULL;
     block address_block, input_block, zero_block;

@@ -114,8 +114,8 @@ void fill_segment(const argon2_instance_t *instance,
         (position.slice < ARGON2_SYNC_POINTS / 2));

     if (data_independent_addressing) {
-        init_block_value(&zero_block, 0);
-        init_block_value(&input_block, 0);
+        rxa2_init_block_value(&zero_block, 0);
+        rxa2_init_block_value(&input_block, 0);

         input_block.v[0] = position.pass;
         input_block.v[1] = position.lane;

@@ -180,7 +180,7 @@ void fill_segment(const argon2_instance_t *instance,
          * lane.
          */
         position.index = i;
-        ref_index = index_alpha(instance, &position, pseudo_rand & 0xFFFFFFFF,
+        ref_index = rxa2_index_alpha(instance, &position, pseudo_rand & 0xFFFFFFFF,
             ref_lane == position.lane);

         /* 2 Creating a new block */
src/blake2/blake2.h

@@ -88,7 +88,7 @@ extern "C" {
         const void *key, size_t keylen);

     /* Argon2 Team - Begin Code */
-    int blake2b_long(void *out, size_t outlen, const void *in, size_t inlen);
+    int rxa2_blake2b_long(void *out, size_t outlen, const void *in, size_t inlen);
     /* Argon2 Team - End Code */

 #if defined(__cplusplus)
src/blake2/blake2b.c

@@ -338,7 +338,7 @@ fail:
 }

 /* Argon2 Team - Begin Code */
-int blake2b_long(void *pout, size_t outlen, const void *in, size_t inlen) {
+int rxa2_blake2b_long(void *pout, size_t outlen, const void *in, size_t inlen) {
     uint8_t *out = (uint8_t *)pout;
     blake2b_state blake_state;
     uint8_t outlen_bytes[sizeof(uint32_t)] = { 0 };
src/dataset.cpp

@@ -95,9 +95,9 @@ namespace randomx {
         /* 3. Initialization: Hashing inputs, allocating memory, filling first
          * blocks
          */
-        argon_initialize(&instance, &context);
+        rxa2_argon_initialize(&instance, &context);

-        fill_memory_blocks(&instance);
+        rxa2_fill_memory_blocks(&instance);

         cache->reciprocalCache.clear();
         randomx::Blake2Generator gen(key, keySize);
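With the Argon2 internals namespaced under the rxa2_ prefix, this library and libsodium can be linked into the same binary without their Argon2 symbols colliding. Below is a minimal sketch of such a consumer; it assumes the public RandomX C API declared in randomx.h and a standard libsodium installation, and it is illustrative only, not part of this commit.

/* clash_demo.c -- hypothetical example, not from the repository.
 * Initializes libsodium and computes one RandomX hash in the same process;
 * before this commit the duplicated Argon2 symbols could make such a link
 * fail or resolve to the wrong implementation. */
#include <sodium.h>
#include <stdio.h>
#include "randomx.h"

int main(void) {
    if (sodium_init() < 0) {           /* pulls libsodium (and its Argon2) into the binary */
        return 1;
    }

    const char key[] = "test key 000";
    const char input[] = "This is a test";

    randomx_cache *cache = randomx_alloc_cache(RANDOMX_FLAG_DEFAULT);
    if (cache == NULL) {
        return 1;
    }
    randomx_init_cache(cache, key, sizeof(key) - 1);   /* runs the rxa2_-prefixed Argon2 internally */

    randomx_vm *vm = randomx_create_vm(RANDOMX_FLAG_DEFAULT, cache, NULL);
    if (vm == NULL) {
        randomx_release_cache(cache);
        return 1;
    }

    unsigned char hash[RANDOMX_HASH_SIZE];
    randomx_calculate_hash(vm, input, sizeof(input) - 1, hash);

    for (unsigned i = 0; i < RANDOMX_HASH_SIZE; ++i) {
        printf("%02x", hash[i]);
    }
    printf("\n");

    randomx_destroy_vm(vm);
    randomx_release_cache(cache);
    return 0;
}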