--- rbnacl-libsodium-1.0.11/vendor/libsodium/src/libsodium/sodium/utils.c	(old)
+++ rbnacl-libsodium-1.0.13/vendor/libsodium/src/libsodium/sodium/utils.c	(new)
@@ -19,62 +19,67 @@
# include <wincrypt.h>
#else
# include <unistd.h>
#endif
-#include "utils.h"
#include "randombytes.h"
+#include "utils.h"
#ifndef ENOSYS
# define ENOSYS ENXIO
#endif
-#if defined(_WIN32) && (!defined(WINAPI_FAMILY) || WINAPI_FAMILY == WINAPI_FAMILY_DESKTOP_APP)
+#if defined(_WIN32) && \
+ (!defined(WINAPI_FAMILY) || WINAPI_FAMILY == WINAPI_FAMILY_DESKTOP_APP)
# define WINAPI_DESKTOP
#endif
#define CANARY_SIZE 16U
-#define GARBAGE_VALUE 0xd0
+#define GARBAGE_VALUE 0xdb
#ifndef MAP_NOCORE
# define MAP_NOCORE 0
#endif
#if !defined(MAP_ANON) && defined(MAP_ANONYMOUS)
# define MAP_ANON MAP_ANONYMOUS
#endif
-#if defined(WINAPI_DESKTOP) || (defined(MAP_ANON) && defined(HAVE_MMAP)) || defined(HAVE_POSIX_MEMALIGN)
+#if defined(WINAPI_DESKTOP) || (defined(MAP_ANON) && defined(HAVE_MMAP)) || \
+ defined(HAVE_POSIX_MEMALIGN)
# define HAVE_ALIGNED_MALLOC
#endif
-#if defined(HAVE_MPROTECT) && !(defined(PROT_NONE) && defined(PROT_READ) && defined(PROT_WRITE))
+#if defined(HAVE_MPROTECT) && \
+ !(defined(PROT_NONE) && defined(PROT_READ) && defined(PROT_WRITE))
# undef HAVE_MPROTECT
#endif
-#if defined(HAVE_ALIGNED_MALLOC) && (defined(WINAPI_DESKTOP) || defined(HAVE_MPROTECT))
+#if defined(HAVE_ALIGNED_MALLOC) && \
+ (defined(WINAPI_DESKTOP) || defined(HAVE_MPROTECT))
# define HAVE_PAGE_PROTECTION
#endif
#if !defined(MADV_DODUMP) && defined(MADV_CORE)
# define MADV_DODUMP MADV_CORE
# define MADV_DONTDUMP MADV_NOCORE
#endif
-static size_t page_size;
+static size_t page_size;
static unsigned char canary[CANARY_SIZE];
#ifdef HAVE_WEAK_SYMBOLS
-__attribute__ ((weak)) void
-_sodium_memzero_as_a_weak_symbol_to_prevent_lto(void * const pnt, const size_t len)
+__attribute__((weak)) void
+_sodium_memzero_as_a_weak_symbol_to_prevent_lto(void *const pnt,
+ const size_t len)
{
- unsigned char *pnt_ = (unsigned char *) pnt;;
- size_t i = (size_t) 0U;
+ unsigned char *pnt_ = (unsigned char *) pnt;
+ size_t i = (size_t) 0U;
while (i < len) {
pnt_[i++] = 0U;
}
}
#endif
void
-sodium_memzero(void * const pnt, const size_t len)
+sodium_memzero(void *const pnt, const size_t len)
{
#ifdef _WIN32
SecureZeroMemory(pnt, len);
#elif defined(HAVE_MEMSET_S)
if (len > 0U && memset_s(pnt, (rsize_t) len, 0, (rsize_t) len) != 0) {
@@ -84,45 +89,45 @@
explicit_bzero(pnt, len);
#elif HAVE_WEAK_SYMBOLS
_sodium_memzero_as_a_weak_symbol_to_prevent_lto(pnt, len);
#else
volatile unsigned char *volatile pnt_ =
- (volatile unsigned char * volatile) pnt;
+ (volatile unsigned char *volatile) pnt;
size_t i = (size_t) 0U;
while (i < len) {
pnt_[i++] = 0U;
}
#endif
}
#ifdef HAVE_WEAK_SYMBOLS
-__attribute__ ((weak)) void
+__attribute__((weak)) void
_sodium_dummy_symbol_to_prevent_memcmp_lto(const unsigned char *b1,
const unsigned char *b2,
- const size_t len)
+ const size_t len)
{
(void) b1;
(void) b2;
(void) len;
}
#endif
int
-sodium_memcmp(const void * const b1_, const void * const b2_, size_t len)
+sodium_memcmp(const void *const b1_, const void *const b2_, size_t len)
{
#ifdef HAVE_WEAK_SYMBOLS
const unsigned char *b1 = (const unsigned char *) b1_;
const unsigned char *b2 = (const unsigned char *) b2_;
#else
const volatile unsigned char *volatile b1 =
- (const volatile unsigned char * volatile) b1_;
+ (const volatile unsigned char *volatile) b1_;
const volatile unsigned char *volatile b2 =
- (const volatile unsigned char * volatile) b2_;
+ (const volatile unsigned char *volatile) b2_;
#endif
- size_t i;
- unsigned char d = (unsigned char) 0U;
+ size_t i;
+ volatile unsigned char d = 0U;
#if HAVE_WEAK_SYMBOLS
_sodium_dummy_symbol_to_prevent_memcmp_lto(b1, b2, len);
#endif
for (i = 0U; i < len; i++) {
@@ -130,14 +135,14 @@
}
return (1 & ((d - 1) >> 8)) - 1;
}
#ifdef HAVE_WEAK_SYMBOLS
-__attribute__ ((weak)) void
+__attribute__((weak)) void
_sodium_dummy_symbol_to_prevent_compare_lto(const unsigned char *b1,
const unsigned char *b2,
- const size_t len)
+ const size_t len)
{
(void) b1;
(void) b2;
(void) len;
}
@@ -148,27 +153,30 @@
{
#ifdef HAVE_WEAK_SYMBOLS
const unsigned char *b1 = b1_;
const unsigned char *b2 = b2_;
#else
- const volatile unsigned char * volatile b1 =
- (const volatile unsigned char * volatile) b1_;
- const volatile unsigned char * volatile b2 =
- (const volatile unsigned char * volatile) b2_;
+ const volatile unsigned char *volatile b1 =
+ (const volatile unsigned char *volatile) b1_;
+ const volatile unsigned char *volatile b2 =
+ (const volatile unsigned char *volatile) b2_;
#endif
- unsigned char gt = 0U;
- unsigned char eq = 1U;
- size_t i;
+ size_t i;
+ volatile unsigned char gt = 0U;
+ volatile unsigned char eq = 1U;
+ volatile uint16_t x1, x2;
#if HAVE_WEAK_SYMBOLS
_sodium_dummy_symbol_to_prevent_compare_lto(b1, b2, len);
#endif
i = len;
while (i != 0U) {
i--;
- gt |= ((b2[i] - b1[i]) >> 8) & eq;
- eq &= ((b2[i] ^ b1[i]) - 1) >> 8;
+ x1 = b1[i];
+ x2 = b2[i];
+ gt |= ((x2 - x1) >> 8) & eq;
+ eq &= ((x2 ^ x1) - 1) >> 8;
}
return (int) (gt + gt + eq) - 1;
}
int
@@ -188,32 +196,34 @@
{
size_t i = 0U;
uint_fast16_t c = 1U;
#ifdef HAVE_AMD64_ASM
- uint64_t t64, t64_2;
- uint32_t t32;
+ uint64_t t64, t64_2;
+ uint32_t t32;
if (nlen == 12U) {
- __asm__ __volatile__("xorq %[t64], %[t64] \n"
- "xorl %[t32], %[t32] \n"
- "stc \n"
- "adcq %[t64], (%[out]) \n"
- "adcl %[t32], 8(%[out]) \n"
- : [t64] "=&r"(t64), [t32] "=&r" (t32)
- : [out] "D"(n)
- : "memory", "flags", "cc");
+ __asm__ __volatile__(
+ "xorq %[t64], %[t64] \n"
+ "xorl %[t32], %[t32] \n"
+ "stc \n"
+ "adcq %[t64], (%[out]) \n"
+ "adcl %[t32], 8(%[out]) \n"
+ : [t64] "=&r"(t64), [t32] "=&r"(t32)
+ : [out] "D"(n)
+ : "memory", "flags", "cc");
return;
} else if (nlen == 24U) {
- __asm__ __volatile__("movq $1, %[t64] \n"
- "xorq %[t64_2], %[t64_2] \n"
- "addq %[t64], (%[out]) \n"
- "adcq %[t64_2], 8(%[out]) \n"
- "adcq %[t64_2], 16(%[out]) \n"
- : [t64] "=&r"(t64), [t64_2] "=&r" (t64_2)
- : [out] "D"(n)
- : "memory", "flags", "cc");
+ __asm__ __volatile__(
+ "movq $1, %[t64] \n"
+ "xorq %[t64_2], %[t64_2] \n"
+ "addq %[t64], (%[out]) \n"
+ "adcq %[t64_2], 8(%[out]) \n"
+ "adcq %[t64_2], 16(%[out]) \n"
+ : [t64] "=&r"(t64), [t64_2] "=&r"(t64_2)
+ : [out] "D"(n)
+ : "memory", "flags", "cc");
return;
} else if (nlen == 8U) {
__asm__ __volatile__("incq (%[out]) \n"
:
: [out] "D"(n)
@@ -233,39 +243,42 @@
{
size_t i = 0U;
uint_fast16_t c = 0U;
#ifdef HAVE_AMD64_ASM
- uint64_t t64, t64_2, t64_3;
- uint32_t t32;
+ uint64_t t64, t64_2, t64_3;
+ uint32_t t32;
if (len == 12U) {
- __asm__ __volatile__("movq (%[in]), %[t64] \n"
- "movl 8(%[in]), %[t32] \n"
- "addq %[t64], (%[out]) \n"
- "adcl %[t32], 8(%[out]) \n"
- : [t64] "=&r"(t64), [t32] "=&r" (t32)
- : [in] "S"(b), [out] "D"(a)
- : "memory", "flags", "cc");
+ __asm__ __volatile__(
+ "movq (%[in]), %[t64] \n"
+ "movl 8(%[in]), %[t32] \n"
+ "addq %[t64], (%[out]) \n"
+ "adcl %[t32], 8(%[out]) \n"
+ : [t64] "=&r"(t64), [t32] "=&r"(t32)
+ : [in] "S"(b), [out] "D"(a)
+ : "memory", "flags", "cc");
return;
} else if (len == 24U) {
- __asm__ __volatile__("movq (%[in]), %[t64] \n"
- "movq 8(%[in]), %[t64_2] \n"
- "movq 16(%[in]), %[t64_3] \n"
- "addq %[t64], (%[out]) \n"
- "adcq %[t64_2], 8(%[out]) \n"
- "adcq %[t64_3], 16(%[out]) \n"
- : [t64] "=&r"(t64), [t64_2] "=&r"(t64_2), [t64_3] "=&r"(t64_3)
- : [in] "S"(b), [out] "D"(a)
- : "memory", "flags", "cc");
+ __asm__ __volatile__(
+ "movq (%[in]), %[t64] \n"
+ "movq 8(%[in]), %[t64_2] \n"
+ "movq 16(%[in]), %[t64_3] \n"
+ "addq %[t64], (%[out]) \n"
+ "adcq %[t64_2], 8(%[out]) \n"
+ "adcq %[t64_3], 16(%[out]) \n"
+ : [t64] "=&r"(t64), [t64_2] "=&r"(t64_2), [t64_3] "=&r"(t64_3)
+ : [in] "S"(b), [out] "D"(a)
+ : "memory", "flags", "cc");
return;
} else if (len == 8U) {
- __asm__ __volatile__("movq (%[in]), %[t64] \n"
- "addq %[t64], (%[out]) \n"
- : [t64] "=&r"(t64)
- : [in] "S"(b), [out] "D"(a)
- : "memory", "flags", "cc");
+ __asm__ __volatile__(
+ "movq (%[in]), %[t64] \n"
+ "addq %[t64], (%[out]) \n"
+ : [t64] "=&r"(t64)
+ : [in] "S"(b), [out] "D"(a)
+ : "memory", "flags", "cc");
return;
}
#endif
for (; i < len; i++) {
c += (uint_fast16_t) a[i] + (uint_fast16_t) b[i];
@@ -274,12 +287,12 @@
}
}
/* Derived from original code by CodesInChaos */
char *
-sodium_bin2hex(char * const hex, const size_t hex_maxlen,
- const unsigned char * const bin, const size_t bin_len)
+sodium_bin2hex(char *const hex, const size_t hex_maxlen,
+ const unsigned char *const bin, const size_t bin_len)
{
size_t i = (size_t) 0U;
unsigned int x;
int b;
int c;
@@ -301,41 +314,41 @@
return hex;
}
int
-sodium_hex2bin(unsigned char * const bin, const size_t bin_maxlen,
- const char * const hex, const size_t hex_len,
- const char * const ignore, size_t * const bin_len,
- const char ** const hex_end)
+sodium_hex2bin(unsigned char *const bin, const size_t bin_maxlen,
+ const char *const hex, const size_t hex_len,
+ const char *const ignore, size_t *const bin_len,
+ const char **const hex_end)
{
size_t bin_pos = (size_t) 0U;
size_t hex_pos = (size_t) 0U;
- int ret = 0;
+ int ret = 0;
unsigned char c;
unsigned char c_acc = 0U;
unsigned char c_alpha0, c_alpha;
unsigned char c_num0, c_num;
unsigned char c_val;
unsigned char state = 0U;
while (hex_pos < hex_len) {
- c = (unsigned char) hex[hex_pos];
- c_num = c ^ 48U;
- c_num0 = (c_num - 10U) >> 8;
- c_alpha = (c & ~32U) - 55U;
+ c = (unsigned char) hex[hex_pos];
+ c_num = c ^ 48U;
+ c_num0 = (c_num - 10U) >> 8;
+ c_alpha = (c & ~32U) - 55U;
c_alpha0 = ((c_alpha - 10U) ^ (c_alpha - 16U)) >> 8;
if ((c_num0 | c_alpha0) == 0U) {
if (ignore != NULL && state == 0U && strchr(ignore, c) != NULL) {
hex_pos++;
continue;
}
break;
}
c_val = (c_num0 & c_num) | (c_alpha0 & c_alpha);
if (bin_pos >= bin_maxlen) {
- ret = -1;
+ ret = -1;
errno = ERANGE;
break;
}
if (state == 0U) {
c_acc = c_val * 16U;
@@ -379,11 +392,11 @@
return 0;
}
int
-sodium_mlock(void * const addr, const size_t len)
+sodium_mlock(void *const addr, const size_t len)
{
#if defined(MADV_DONTDUMP) && defined(HAVE_MADVISE)
(void) madvise(addr, len, MADV_DONTDUMP);
#endif
#ifdef HAVE_MLOCK
@@ -395,11 +408,11 @@
return -1;
#endif
}
int
-sodium_munlock(void * const addr, const size_t len)
+sodium_munlock(void *const addr, const size_t len)
{
sodium_memzero(addr, len);
#if defined(MADV_DODUMP) && defined(HAVE_MADVISE)
(void) madvise(addr, len, MADV_DODUMP);
#endif
@@ -455,11 +468,11 @@
#endif
}
#ifdef HAVE_ALIGNED_MALLOC
-__attribute__ ((noreturn)) static void
+__attribute__((noreturn)) static void
_out_of_bounds(void)
{
# ifdef SIGSEGV
raise(SIGSEGV);
# elif defined(SIGKILL)
@@ -474,48 +487,49 @@
const size_t page_mask = page_size - 1U;
return (size + page_mask) & ~page_mask;
}
-static __attribute__ ((malloc)) unsigned char *
+static __attribute__((malloc)) unsigned char *
_alloc_aligned(const size_t size)
{
void *ptr;
# if defined(MAP_ANON) && defined(HAVE_MMAP)
if ((ptr = mmap(NULL, size, PROT_READ | PROT_WRITE,
- MAP_ANON | MAP_PRIVATE | MAP_NOCORE, -1, 0)) == MAP_FAILED) {
+ MAP_ANON | MAP_PRIVATE | MAP_NOCORE, -1, 0)) ==
+ MAP_FAILED) {
ptr = NULL; /* LCOV_EXCL_LINE */
- } /* LCOV_EXCL_LINE */
+ } /* LCOV_EXCL_LINE */
# elif defined(HAVE_POSIX_MEMALIGN)
if (posix_memalign(&ptr, page_size, size) != 0) {
ptr = NULL; /* LCOV_EXCL_LINE */
- } /* LCOV_EXCL_LINE */
+ } /* LCOV_EXCL_LINE */
# elif defined(WINAPI_DESKTOP)
ptr = VirtualAlloc(NULL, size, MEM_COMMIT | MEM_RESERVE, PAGE_READWRITE);
# else
# error Bug
# endif
return (unsigned char *) ptr;
}
static void
-_free_aligned(unsigned char * const ptr, const size_t size)
+_free_aligned(unsigned char *const ptr, const size_t size)
{
# if defined(MAP_ANON) && defined(HAVE_MMAP)
(void) munmap(ptr, size);
# elif defined(HAVE_POSIX_MEMALIGN)
free(ptr);
# elif defined(WINAPI_DESKTOP)
VirtualFree(ptr, 0U, MEM_RELEASE);
# else
# error Bug
-# endif
+#endif
}
static unsigned char *
-_unprotected_ptr_from_user_ptr(void * const ptr)
+_unprotected_ptr_from_user_ptr(void *const ptr)
{
uintptr_t unprotected_ptr_u;
unsigned char *canary_ptr;
size_t page_mask;
@@ -529,17 +543,17 @@
}
#endif /* HAVE_ALIGNED_MALLOC */
#ifndef HAVE_ALIGNED_MALLOC
-static __attribute__ ((malloc)) void *
+static __attribute__((malloc)) void *
_sodium_malloc(const size_t size)
{
return malloc(size > (size_t) 0U ? size : (size_t) 1U);
}
#else
-static __attribute__ ((malloc)) void *
+static __attribute__((malloc)) void *
_sodium_malloc(const size_t size)
{
void *user_ptr;
unsigned char *base_ptr;
unsigned char *canary_ptr;
@@ -555,34 +569,34 @@
if (page_size <= sizeof canary || page_size < sizeof unprotected_size) {
abort(); /* LCOV_EXCL_LINE */
}
size_with_canary = (sizeof canary) + size;
unprotected_size = _page_round(size_with_canary);
- total_size = page_size + page_size + unprotected_size + page_size;
+ total_size = page_size + page_size + unprotected_size + page_size;
if ((base_ptr = _alloc_aligned(total_size)) == NULL) {
return NULL; /* LCOV_EXCL_LINE */
}
unprotected_ptr = base_ptr + page_size * 2U;
_mprotect_noaccess(base_ptr + page_size, page_size);
# ifndef HAVE_PAGE_PROTECTION
memcpy(unprotected_ptr + unprotected_size, canary, sizeof canary);
# endif
_mprotect_noaccess(unprotected_ptr + unprotected_size, page_size);
sodium_mlock(unprotected_ptr, unprotected_size);
- canary_ptr = unprotected_ptr + _page_round(size_with_canary) -
- size_with_canary;
+ canary_ptr =
+ unprotected_ptr + _page_round(size_with_canary) - size_with_canary;
user_ptr = canary_ptr + sizeof canary;
memcpy(canary_ptr, canary, sizeof canary);
memcpy(base_ptr, &unprotected_size, sizeof unprotected_size);
_mprotect_readonly(base_ptr, page_size);
assert(_unprotected_ptr_from_user_ptr(user_ptr) == unprotected_ptr);
return user_ptr;
}
#endif /* !HAVE_ALIGNED_MALLOC */
-__attribute__ ((malloc)) void *
+__attribute__((malloc)) void *
sodium_malloc(const size_t size)
{
void *ptr;
if ((ptr = _sodium_malloc(size)) == NULL) {
@@ -591,11 +605,11 @@
memset(ptr, (int) GARBAGE_VALUE, size);
return ptr;
}
-__attribute__ ((malloc)) void *
+__attribute__((malloc)) void *
sodium_allocarray(size_t count, size_t size)
{
size_t total_size;
if (count > (size_t) 0U && size >= (size_t) SIZE_MAX / count) {
@@ -624,22 +638,22 @@
size_t unprotected_size;
if (ptr == NULL) {
return;
}
- canary_ptr = ((unsigned char *) ptr) - sizeof canary;
+ canary_ptr = ((unsigned char *) ptr) - sizeof canary;
unprotected_ptr = _unprotected_ptr_from_user_ptr(ptr);
- base_ptr = unprotected_ptr - page_size * 2U;
+ base_ptr = unprotected_ptr - page_size * 2U;
memcpy(&unprotected_size, base_ptr, sizeof unprotected_size);
total_size = page_size + page_size + unprotected_size + page_size;
_mprotect_readwrite(base_ptr, total_size);
if (sodium_memcmp(canary_ptr, canary, sizeof canary) != 0) {
_out_of_bounds();
}
# ifndef HAVE_PAGE_PROTECTION
- if (sodium_memcmp(unprotected_ptr + unprotected_size,
- canary, sizeof canary) != 0) {
+ if (sodium_memcmp(unprotected_ptr + unprotected_size, canary,
+ sizeof canary) != 0) {
_out_of_bounds();
}
# endif
sodium_munlock(unprotected_ptr, unprotected_size);
_free_aligned(base_ptr, total_size);
@@ -662,10 +676,10 @@
unsigned char *base_ptr;
unsigned char *unprotected_ptr;
size_t unprotected_size;
unprotected_ptr = _unprotected_ptr_from_user_ptr(ptr);
- base_ptr = unprotected_ptr - page_size * 2U;
+ base_ptr = unprotected_ptr - page_size * 2U;
memcpy(&unprotected_size, base_ptr, sizeof unprotected_size);
return cb(unprotected_ptr, unprotected_size);
}
#endif
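
Appendix (not part of the diff): a minimal, self-contained sketch of the constant-time comparison pattern that the sodium_compare() hunk above switches to, in which each byte is widened into a volatile 16-bit temporary before the subtraction so the compiler cannot fold the borrow/equality arithmetic into data-dependent branches. The function name compare_le_ct and the surrounding scaffolding are illustrative only, not part of the vendored file; like sodium_compare(), it treats the inputs as little-endian numbers and returns -1, 0 or 1.

#include <stddef.h>
#include <stdint.h>

static int
compare_le_ct(const unsigned char *b1, const unsigned char *b2, size_t len)
{
    volatile unsigned char gt = 0U;   /* becomes 1 once b1 > b2 is decided   */
    volatile unsigned char eq = 1U;   /* stays 1 only while all bytes match  */
    volatile uint16_t      x1, x2;
    size_t                 i  = len;

    /* Walk from the most significant byte (little-endian layout) down. */
    while (i != 0U) {
        i--;
        x1 = b1[i];                     /* widen before subtracting          */
        x2 = b2[i];
        gt |= ((x2 - x1) >> 8) & eq;    /* borrow => b1 > b2 at the most
                                           significant differing byte        */
        eq &= ((x2 ^ x1) - 1) >> 8;     /* 1 iff the bytes are equal          */
    }
    return (int) (gt + gt + eq) - 1;    /* 1, 0 or -1                         */
}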