2002-01-04 02:12:29 +03:00
|
|
|
/*
|
|
|
|
* Tiny C Memory and bounds checker
|
2015-07-29 23:53:57 +03:00
|
|
|
*
|
2002-01-04 02:12:29 +03:00
|
|
|
* Copyright (c) 2002 Fabrice Bellard
|
|
|
|
*
|
2007-11-14 20:34:30 +03:00
|
|
|
* This library is free software; you can redistribute it and/or
|
|
|
|
* modify it under the terms of the GNU Lesser General Public
|
|
|
|
* License as published by the Free Software Foundation; either
|
|
|
|
* version 2 of the License, or (at your option) any later version.
|
2002-01-04 02:12:29 +03:00
|
|
|
*
|
2007-11-14 20:34:30 +03:00
|
|
|
* This library is distributed in the hope that it will be useful,
|
|
|
|
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
|
|
|
* Lesser General Public License for more details.
|
2002-01-04 02:12:29 +03:00
|
|
|
*
|
2007-11-14 20:34:30 +03:00
|
|
|
* You should have received a copy of the GNU Lesser General Public
|
|
|
|
* License along with this library; if not, write to the Free Software
|
|
|
|
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
2002-01-04 02:12:29 +03:00
|
|
|
*/
|
2002-07-25 02:11:56 +04:00
|
|
|
#include <stdlib.h>
|
|
|
|
#include <stdio.h>
|
|
|
|
#include <stdarg.h>
|
|
|
|
#include <string.h>
|
2020-05-23 21:02:41 +03:00
|
|
|
#include <setjmp.h>
|
2017-07-23 22:24:11 +03:00
|
|
|
|
|
|
|
#if !defined(__FreeBSD__) \
|
|
|
|
&& !defined(__FreeBSD_kernel__) \
|
|
|
|
&& !defined(__DragonFly__) \
|
|
|
|
&& !defined(__OpenBSD__) \
|
|
|
|
&& !defined(__NetBSD__)
|
2002-07-25 02:11:56 +04:00
|
|
|
#include <malloc.h>
|
2002-12-08 17:34:30 +03:00
|
|
|
#endif
|
2017-07-23 22:24:11 +03:00
|
|
|
|
2012-12-10 05:51:49 +04:00
|
|
|
#if !defined(_WIN32)
|
lib/bcheck: Don't assume heap goes right after bss
At startup __bound_init() wants to mark malloc zone as invalid memory,
so that any access to memory on heap, not allocated through malloc be
invalid. Other pages are initialized as empty regions, access to which
is not treated as invalid by bounds-checking.
The problem is code incorrectly assumed that heap goes right after bss,
and that is not correct for two cases:
1) if we are running from `tcc -b -run`, program text data and bss
will be already in malloced memory, possibly in mmaped region
instead of heap, and marking memory as invalid from _end
will not cover heap and probably wrongly mark correct regions.
2) if address space randomization is turned on, again heap does not
start from _end, and we'll mark as invalid something else instead
of malloc area.
For example with the following diagnostic patch ...
diff --git a/tcc.c b/tcc.c
index 5dd5725..31c46e8 100644
--- a/tcc.c
+++ b/tcc.c
@@ -479,6 +479,8 @@ static int parse_args(TCCState *s, int argc, char **argv)
return optind;
}
+extern int _etext, _edata, _end;
+
int main(int argc, char **argv)
{
int i;
@@ -487,6 +489,18 @@ int main(int argc, char **argv)
int64_t start_time = 0;
const char *default_file = NULL;
+ void *brk;
+
+ brk = sbrk(0);
+
+ fprintf(stderr, "\n>>> TCC\n\n");
+ fprintf(stderr, "etext:\t%10p\n", &_etext);
+ fprintf(stderr, "edata:\t%10p\n", &_edata);
+ fprintf(stderr, "end:\t%10p\n", &_end);
+ fprintf(stderr, "brk:\t%10p\n", brk);
+ fprintf(stderr, "stack:\t%10p\n", &brk);
+
+ fprintf(stderr, "&errno: %p\n", &errno);
s = tcc_new();
output_type = TCC_OUTPUT_EXE;
diff --git a/tccrun.c b/tccrun.c
index 531f46a..25ed30a 100644
--- a/tccrun.c
+++ b/tccrun.c
@@ -91,6 +91,8 @@ LIBTCCAPI int tcc_run(TCCState *s1, int argc, char **argv)
int (*prog_main)(int, char **);
int ret;
+ fprintf(stderr, "\n\ntcc_run() ...\n\n");
+
if (tcc_relocate(s1, TCC_RELOCATE_AUTO) < 0)
return -1;
diff --git a/lib/bcheck.c b/lib/bcheck.c
index ea5b233..8b26a5f 100644
--- a/lib/bcheck.c
+++ b/lib/bcheck.c
@@ -296,6 +326,8 @@ static void mark_invalid(unsigned long addr, unsigned long size)
start = addr;
end = addr + size;
+ fprintf(stderr, "mark_invalid %10p - %10p\n", (void *)addr, (void *)end);
+
t2_start = (start + BOUND_T3_SIZE - 1) >> BOUND_T3_BITS;
if (end != 0)
t2_end = end >> BOUND_T3_BITS;
... Look how memory is laid out for `tcc -b -run ...`:
$ ./tcc -B. -b -DTCC_TARGET_I386 -DCONFIG_MULTIARCHDIR=\"i386-linux-gnu\" -run \
-DONE_SOURCE ./tcc.c -B. -c x.c
>>> TCC
etext: 0x8065477
edata: 0x8070220
end: 0x807a95c
brk: 0x807b000
stack: 0xaffff0f0
&errno: 0xa7e25688
tcc_run() ...
mark_invalid 0xfff80000 - (nil)
mark_invalid 0xa7c31d98 - 0xafc31d98
>>> TCC
etext: 0xa7c22767
edata: 0xa7c2759c
end: 0xa7c31d98
brk: 0x8211000
stack: 0xafffeff0
&errno: 0xa7e25688
Runtime error: dereferencing invalid pointer
./tccpp.c:1953: at 0xa7beebdf parse_number() (included from ./libtcc.c, ./tcc.c)
./tccpp.c:3003: by 0xa7bf0708 next() (included from ./libtcc.c, ./tcc.c)
./tccgen.c:4465: by 0xa7bfe348 block() (included from ./libtcc.c, ./tcc.c)
./tccgen.c:4440: by 0xa7bfe212 block() (included from ./libtcc.c, ./tcc.c)
./tccgen.c:5529: by 0xa7c01929 gen_function() (included from ./libtcc.c, ./tcc.c)
./tccgen.c:5767: by 0xa7c02602 decl0() (included from ./libtcc.c, ./tcc.c)
The second mark_invalid goes right after in-memory-compiled program's
_end, and oops, that's not where malloc zone is (starts from brk), and oops
again, mark_invalid covers e.g. errno. Then compiled tcc is crashing by
bcheck on errno access:
1776 static void parse_number(const char *p)
1777 {
1778 int b, t, shift, frac_bits, s, exp_val, ch;
...
1951 *q = '\0';
1952 t = toup(ch);
1953 errno = 0;
The solution here is to use sbrk(0) as approximation for the program
break start instead of &_end:
- if we are a separately compiled program, __bound_init() runs early,
and sbrk(0) should be equal or very near to start_brk (in case other
constructors malloc something), or
- if we are running from under `tcc -b -run`, sbrk(0) will return
start of heap portion which is under this program control, and not
mark as invalid earlier allocated memory.
With this patch `tcc -b -run tcc.c ...` succeeds compiling above
small-test program (diagnostic patch is still applied too):
$ ./tcc -B. -b -DTCC_TARGET_I386 -DCONFIG_MULTIARCHDIR=\"i386-linux-gnu\" -run \
-DONE_SOURCE ./tcc.c -B. -c x.c
>>> TCC
etext: 0x8065477
edata: 0x8070220
end: 0x807a95c
brk: 0x807b000
stack: 0xaffff0f0
&errno: 0xa7e25688
tcc_run() ...
mark_invalid 0xfff80000 - (nil)
mark_invalid 0x8211000 - 0x10211000
>>> TCC
etext: 0xa7c22777
edata: 0xa7c275ac
end: 0xa7c31da8
brk: 0x8211000
stack: 0xafffeff0
&errno: 0xa7e25688
(completes ok)
but running `tcc -b -run tcc.c -run tests/tcctest.c` sigsegv's - that's
the plot for the next patch.
2012-12-09 18:48:48 +04:00
|
|
|
#include <unistd.h>
|
2020-05-23 21:02:41 +03:00
|
|
|
#include <sys/syscall.h>
|
2012-12-10 05:51:49 +04:00
|
|
|
#endif
|
2002-01-04 02:12:29 +03:00
|
|
|
|
2020-01-15 10:53:19 +03:00
|
|
|
#define BOUND_DEBUG (1)
|
|
|
|
#define BOUND_STATISTIC (1)
|
2002-01-04 02:12:29 +03:00
|
|
|
|
2020-01-15 10:53:19 +03:00
|
|
|
#if BOUND_DEBUG
|
|
|
|
#define dprintf(a...) if (print_calls) fprintf(a)
|
2015-04-10 15:17:22 +03:00
|
|
|
#else
|
|
|
|
#define dprintf(a...)
|
|
|
|
#endif
|
|
|
|
|
2020-01-15 10:53:19 +03:00
|
|
|
#ifdef __attribute__
|
|
|
|
/* an __attribute__ macro is defined in the system headers */
|
|
|
|
#undef __attribute__
|
|
|
|
#endif
|
|
|
|
#define FASTCALL __attribute__((regparm(3)))
|
2002-12-08 17:34:30 +03:00
|
|
|
|
2020-01-18 00:58:39 +03:00
|
|
|
#ifdef _WIN32
|
|
|
|
# define DLL_EXPORT __declspec(dllexport)
|
|
|
|
#else
|
|
|
|
# define DLL_EXPORT
|
|
|
|
#endif
|
|
|
|
|
2017-07-23 22:24:11 +03:00
|
|
|
#if defined(__FreeBSD__) \
|
|
|
|
|| defined(__FreeBSD_kernel__) \
|
|
|
|
|| defined(__DragonFly__) \
|
|
|
|
|| defined(__OpenBSD__) \
|
|
|
|
|| defined(__NetBSD__) \
|
2019-12-12 22:49:35 +03:00
|
|
|
|| defined(__dietlibc__)
|
2020-01-15 10:53:19 +03:00
|
|
|
|
2019-12-10 10:07:25 +03:00
|
|
|
#define INIT_SEM()
|
|
|
|
#define EXIT_SEM()
|
|
|
|
#define WAIT_SEM()
|
|
|
|
#define POST_SEM()
|
2020-01-15 10:53:19 +03:00
|
|
|
#define HAVE_MEMALIGN (0)
|
|
|
|
#define HAS_ENVIRON (0)
|
|
|
|
#define MALLOC_REDIR (0)
|
|
|
|
#define HAVE_PTHREAD_CREATE (0)
|
|
|
|
#define HAVE_CTYPE (0)
|
|
|
|
#define HAVE_ERRNO (0)
|
|
|
|
|
2019-12-12 22:49:35 +03:00
|
|
|
#elif defined(_WIN32)
|
2020-01-15 10:53:19 +03:00
|
|
|
|
2019-12-12 22:49:35 +03:00
|
|
|
#include <windows.h>
|
|
|
|
static CRITICAL_SECTION bounds_sem;
|
2020-01-15 10:53:19 +03:00
|
|
|
#define INIT_SEM() InitializeCriticalSection(&bounds_sem)
|
|
|
|
#define EXIT_SEM() DeleteCriticalSection(&bounds_sem)
|
|
|
|
#define WAIT_SEM() EnterCriticalSection(&bounds_sem)
|
|
|
|
#define POST_SEM() LeaveCriticalSection(&bounds_sem)
|
|
|
|
#define HAVE_MEMALIGN (0)
|
|
|
|
#define HAS_ENVIRON (0)
|
|
|
|
#define MALLOC_REDIR (0)
|
|
|
|
#define HAVE_PTHREAD_CREATE (0)
|
|
|
|
#define HAVE_CTYPE (0)
|
|
|
|
#define HAVE_ERRNO (0)
|
|
|
|
|
2019-12-10 10:07:25 +03:00
|
|
|
#else
|
2020-01-15 10:53:19 +03:00
|
|
|
|
|
|
|
#define __USE_GNU /* get RTLD_NEXT */
|
2019-12-10 10:07:25 +03:00
|
|
|
#include <sys/mman.h>
|
2020-01-15 10:53:19 +03:00
|
|
|
#include <ctype.h>
|
|
|
|
#include <pthread.h>
|
|
|
|
#include <dlfcn.h>
|
2019-12-10 10:07:25 +03:00
|
|
|
#include <errno.h>
|
2020-01-15 10:53:19 +03:00
|
|
|
#if 0
|
2019-12-10 10:07:25 +03:00
|
|
|
#include <semaphore.h>
|
|
|
|
static sem_t bounds_sem;
|
2020-01-15 10:53:19 +03:00
|
|
|
#define INIT_SEM() sem_init (&bounds_sem, 0, 1)
|
|
|
|
#define EXIT_SEM() sem_destroy (&bounds_sem)
|
|
|
|
#define WAIT_SEM() if (use_sem) while (sem_wait (&bounds_sem) < 0 \
|
|
|
|
&& errno == EINTR)
|
|
|
|
#define POST_SEM() if (use_sem) sem_post (&bounds_sem)
|
|
|
|
#else
|
|
|
|
static pthread_spinlock_t bounds_spin;
|
|
|
|
/* about 25% faster then semaphore. */
|
|
|
|
#define INIT_SEM() pthread_spin_init (&bounds_spin, 0)
|
|
|
|
#define EXIT_SEM() pthread_spin_destroy (&bounds_spin)
|
|
|
|
#define WAIT_SEM() if (use_sem) pthread_spin_lock (&bounds_spin)
|
|
|
|
#define POST_SEM() if (use_sem) pthread_spin_unlock (&bounds_spin)
|
|
|
|
#endif
|
|
|
|
#define HAVE_MEMALIGN (1)
|
|
|
|
#define HAS_ENVIRON (1)
|
|
|
|
#define MALLOC_REDIR (1)
|
|
|
|
#define HAVE_PTHREAD_CREATE (1)
|
|
|
|
#define HAVE_CTYPE (1)
|
|
|
|
#define HAVE_ERRNO (1)
|
|
|
|
|
|
|
|
static void *(*malloc_redir) (size_t);
|
|
|
|
static void *(*calloc_redir) (size_t, size_t);
|
|
|
|
static void (*free_redir) (void *);
|
|
|
|
static void *(*realloc_redir) (void *, size_t);
|
|
|
|
static void *(*memalign_redir) (size_t, size_t);
|
|
|
|
static int (*pthread_create_redir) (pthread_t *thread,
|
|
|
|
const pthread_attr_t *attr,
|
|
|
|
void *(*start_routine)(void *), void *arg);
|
|
|
|
static unsigned int pool_index;
|
|
|
|
static unsigned char __attribute__((aligned(16))) initial_pool[256];
|
|
|
|
static unsigned char use_sem;
|
|
|
|
|
2002-12-08 17:34:30 +03:00
|
|
|
#endif
|
|
|
|
|
2019-12-10 21:47:33 +03:00
|
|
|
#define TCC_TYPE_NONE (0)
|
|
|
|
#define TCC_TYPE_MALLOC (1)
|
|
|
|
#define TCC_TYPE_CALLOC (2)
|
|
|
|
#define TCC_TYPE_REALLOC (3)
|
|
|
|
#define TCC_TYPE_MEMALIGN (4)
|
2019-12-12 22:49:35 +03:00
|
|
|
#define TCC_TYPE_STRDUP (5)
|
2019-12-10 21:47:33 +03:00
|
|
|
|
2002-01-04 02:12:29 +03:00
|
|
|
/* this pointer is generated when bound check is incorrect */
|
|
|
|
#define INVALID_POINTER ((void *)(-2))
|
|
|
|
|
2019-12-10 10:07:25 +03:00
|
|
|
typedef struct tree_node Tree;
|
|
|
|
struct tree_node {
|
|
|
|
Tree * left, * right;
|
2015-03-26 07:47:45 +03:00
|
|
|
size_t start;
|
|
|
|
size_t size;
|
2020-01-15 10:53:19 +03:00
|
|
|
unsigned char type;
|
|
|
|
unsigned char is_invalid; /* true if pointers outside region are invalid */
|
2019-12-10 10:07:25 +03:00
|
|
|
};
|
|
|
|
|
|
|
|
typedef struct alloca_list_struct {
|
|
|
|
size_t fp;
|
|
|
|
void *p;
|
|
|
|
struct alloca_list_struct *next;
|
|
|
|
} alloca_list_type;
|
|
|
|
|
2020-05-23 21:02:41 +03:00
|
|
|
#if defined(_WIN32)
|
|
|
|
#define BOUND_TID_TYPE DWORD
|
|
|
|
#define BOUND_GET_TID GetCurrentThreadId()
|
|
|
|
#elif defined(__i386__) || defined(__x86_64__)
|
|
|
|
#define BOUND_TID_TYPE pid_t
|
|
|
|
#define BOUND_GET_TID syscall (SYS_gettid)
|
|
|
|
#else
|
|
|
|
#define BOUND_TID_TYPE int
|
|
|
|
#define BOUND_GET_TID 0
|
|
|
|
#endif
|
|
|
|
|
|
|
|
typedef struct jmp_list_struct {
|
|
|
|
void *penv;
|
|
|
|
size_t fp;
|
|
|
|
size_t end_fp;
|
|
|
|
BOUND_TID_TYPE tid;
|
|
|
|
struct jmp_list_struct *next;
|
|
|
|
} jmp_list_type;
|
|
|
|
|
2020-01-15 10:53:19 +03:00
|
|
|
#define BOUND_STATISTIC_SPLAY (0)
|
2019-12-10 10:07:25 +03:00
|
|
|
static Tree * splay (size_t addr, Tree *t);
|
|
|
|
static Tree * splay_end (size_t addr, Tree *t);
|
|
|
|
static Tree * splay_insert(size_t addr, size_t size, Tree * t);
|
|
|
|
static Tree * splay_delete(size_t addr, Tree *t);
|
|
|
|
void splay_printtree(Tree * t, int d);
|
2002-01-04 02:12:29 +03:00
|
|
|
|
|
|
|
/* external interface */
|
2020-01-15 10:53:19 +03:00
|
|
|
void __bound_checking (int no_check);
|
|
|
|
void __bound_never_fatal (int no_check);
|
2020-01-18 00:58:39 +03:00
|
|
|
DLL_EXPORT void * __bound_ptr_add(void *p, size_t offset);
|
|
|
|
DLL_EXPORT void * __bound_ptr_indir1(void *p, size_t offset);
|
|
|
|
DLL_EXPORT void * __bound_ptr_indir2(void *p, size_t offset);
|
|
|
|
DLL_EXPORT void * __bound_ptr_indir4(void *p, size_t offset);
|
|
|
|
DLL_EXPORT void * __bound_ptr_indir8(void *p, size_t offset);
|
|
|
|
DLL_EXPORT void * __bound_ptr_indir12(void *p, size_t offset);
|
|
|
|
DLL_EXPORT void * __bound_ptr_indir16(void *p, size_t offset);
|
|
|
|
DLL_EXPORT void FASTCALL __bound_local_new(void *p1);
|
|
|
|
DLL_EXPORT void FASTCALL __bound_local_delete(void *p1);
|
|
|
|
void __bound_init(size_t *);
|
2020-01-15 10:53:19 +03:00
|
|
|
void __bound_main_arg(char **p);
|
2019-12-10 21:47:33 +03:00
|
|
|
void __bound_exit(void);
|
2020-01-15 10:53:19 +03:00
|
|
|
#if !defined(_WIN32)
|
|
|
|
void *__bound_mmap (void *start, size_t size, int prot, int flags, int fd,
|
|
|
|
off_t offset);
|
|
|
|
int __bound_munmap (void *start, size_t size);
|
2020-05-25 13:26:55 +03:00
|
|
|
DLL_EXPORT void __bound_siglongjmp(jmp_buf env, int val);
|
2015-03-29 11:28:02 +03:00
|
|
|
#endif
|
2020-01-18 00:58:39 +03:00
|
|
|
DLL_EXPORT void __bound_new_region(void *p, size_t size);
|
2020-05-23 21:02:41 +03:00
|
|
|
DLL_EXPORT void __bound_setjmp(jmp_buf env);
|
|
|
|
DLL_EXPORT void __bound_longjmp(jmp_buf env, int val);
|
2020-01-18 00:58:39 +03:00
|
|
|
DLL_EXPORT void *__bound_memcpy(void *dst, const void *src, size_t size);
|
|
|
|
DLL_EXPORT int __bound_memcmp(const void *s1, const void *s2, size_t size);
|
|
|
|
DLL_EXPORT void *__bound_memmove(void *dst, const void *src, size_t size);
|
|
|
|
DLL_EXPORT void *__bound_memset(void *dst, int c, size_t size);
|
|
|
|
DLL_EXPORT int __bound_strlen(const char *s);
|
|
|
|
DLL_EXPORT char *__bound_strcpy(char *dst, const char *src);
|
|
|
|
DLL_EXPORT char *__bound_strncpy(char *dst, const char *src, size_t n);
|
|
|
|
DLL_EXPORT int __bound_strcmp(const char *s1, const char *s2);
|
|
|
|
DLL_EXPORT int __bound_strncmp(const char *s1, const char *s2, size_t n);
|
|
|
|
DLL_EXPORT char *__bound_strcat(char *dest, const char *src);
|
|
|
|
DLL_EXPORT char *__bound_strchr(const char *string, int ch);
|
|
|
|
DLL_EXPORT char *__bound_strdup(const char *s);
|
2002-01-05 03:41:11 +03:00
|
|
|
|
2020-05-23 21:02:41 +03:00
|
|
|
#if MALLOC_REDIR
|
|
|
|
#define BOUND_MALLOC(a) malloc_redir(a)
|
|
|
|
#define BOUND_MEMALIGN(a,b) memalign_redir(a,b)
|
|
|
|
#define BOUND_FREE(a) free_redir(a)
|
|
|
|
#define BOUND_REALLOC(a,b) realloc_redir(a,b)
|
|
|
|
#define BOUND_CALLOC(a,b) calloc_redir(a,b)
|
|
|
|
#else
|
|
|
|
#define BOUND_MALLOC(a) malloc(a)
|
|
|
|
#define BOUND_MEMALIGN(a,b) memalign(a,b)
|
|
|
|
#define BOUND_FREE(a) free(a)
|
|
|
|
#define BOUND_REALLOC(a,b) realloc(a,b)
|
|
|
|
#define BOUND_CALLOC(a,b) calloc(a,b)
|
2020-01-18 00:58:39 +03:00
|
|
|
DLL_EXPORT void *__bound_malloc(size_t size, const void *caller);
|
|
|
|
DLL_EXPORT void *__bound_memalign(size_t size, size_t align, const void *caller);
|
|
|
|
DLL_EXPORT void __bound_free(void *ptr, const void *caller);
|
|
|
|
DLL_EXPORT void *__bound_realloc(void *ptr, size_t size, const void *caller);
|
|
|
|
DLL_EXPORT void *__bound_calloc(size_t nmemb, size_t size);
|
2002-01-12 19:39:35 +03:00
|
|
|
#endif
|
2002-01-04 02:12:29 +03:00
|
|
|
|
2019-12-10 10:07:25 +03:00
|
|
|
#define FREE_REUSE_SIZE (100)
|
2020-01-15 10:53:19 +03:00
|
|
|
static unsigned int free_reuse_index;
|
2019-12-10 10:07:25 +03:00
|
|
|
static void *free_reuse_list[FREE_REUSE_SIZE];
|
|
|
|
|
|
|
|
static Tree *tree = NULL;
|
|
|
|
#define TREE_REUSE (1)
|
|
|
|
#if TREE_REUSE
|
2020-01-15 10:53:19 +03:00
|
|
|
static Tree *tree_free_list;
|
2002-01-04 02:12:29 +03:00
|
|
|
#endif
|
2020-01-15 10:53:19 +03:00
|
|
|
static alloca_list_type *alloca_list;
|
2020-05-23 21:02:41 +03:00
|
|
|
static jmp_list_type *jmp_list;
|
2020-01-15 10:53:19 +03:00
|
|
|
|
|
|
|
static unsigned char inited;
|
|
|
|
static unsigned char print_warn_ptr_add;
|
|
|
|
static unsigned char print_calls;
|
|
|
|
static unsigned char print_heap;
|
|
|
|
static unsigned char print_statistic;
|
|
|
|
static unsigned char no_strdup;
|
|
|
|
static signed char never_fatal;
|
|
|
|
static signed char no_checking = 1;
|
|
|
|
static char exec[100];
|
|
|
|
|
|
|
|
#if BOUND_STATISTIC
|
2019-12-13 12:02:20 +03:00
|
|
|
static unsigned long long bound_ptr_add_count;
|
|
|
|
static unsigned long long bound_ptr_indir1_count;
|
|
|
|
static unsigned long long bound_ptr_indir2_count;
|
|
|
|
static unsigned long long bound_ptr_indir4_count;
|
|
|
|
static unsigned long long bound_ptr_indir8_count;
|
|
|
|
static unsigned long long bound_ptr_indir12_count;
|
|
|
|
static unsigned long long bound_ptr_indir16_count;
|
|
|
|
static unsigned long long bound_local_new_count;
|
|
|
|
static unsigned long long bound_local_delete_count;
|
|
|
|
static unsigned long long bound_malloc_count;
|
|
|
|
static unsigned long long bound_calloc_count;
|
|
|
|
static unsigned long long bound_realloc_count;
|
|
|
|
static unsigned long long bound_free_count;
|
|
|
|
static unsigned long long bound_memalign_count;
|
|
|
|
static unsigned long long bound_mmap_count;
|
|
|
|
static unsigned long long bound_munmap_count;
|
|
|
|
static unsigned long long bound_alloca_count;
|
2020-05-23 21:02:41 +03:00
|
|
|
static unsigned long long bound_setjmp_count;
|
|
|
|
static unsigned long long bound_longjmp_count;
|
2019-12-13 12:02:20 +03:00
|
|
|
static unsigned long long bound_mempcy_count;
|
|
|
|
static unsigned long long bound_memcmp_count;
|
|
|
|
static unsigned long long bound_memmove_count;
|
|
|
|
static unsigned long long bound_memset_count;
|
|
|
|
static unsigned long long bound_strlen_count;
|
|
|
|
static unsigned long long bound_strcpy_count;
|
|
|
|
static unsigned long long bound_strncpy_count;
|
|
|
|
static unsigned long long bound_strcmp_count;
|
|
|
|
static unsigned long long bound_strncmp_count;
|
|
|
|
static unsigned long long bound_strcat_count;
|
|
|
|
static unsigned long long bound_strchr_count;
|
|
|
|
static unsigned long long bound_strdup_count;
|
2020-01-15 10:53:19 +03:00
|
|
|
static unsigned long long bound_not_found;
|
|
|
|
#define INCR_COUNT(x) ++x
|
2019-12-12 22:49:35 +03:00
|
|
|
#else
|
|
|
|
#define INCR_COUNT(x)
|
|
|
|
#endif
|
2020-01-15 10:53:19 +03:00
|
|
|
#if BOUND_STATISTIC_SPLAY
|
|
|
|
static unsigned long long bound_splay;
|
|
|
|
static unsigned long long bound_splay_end;
|
|
|
|
static unsigned long long bound_splay_insert;
|
|
|
|
static unsigned long long bound_splay_delete;
|
|
|
|
#define INCR_COUNT_SPLAY(x) ++x
|
|
|
|
#else
|
|
|
|
#define INCR_COUNT_SPLAY(x)
|
|
|
|
#endif
|
2019-12-12 22:49:35 +03:00
|
|
|
|
2020-01-15 10:53:19 +03:00
|
|
|
/* Atomically add 'value' to the signed-char flag '*variable'.
   Used for the global no_checking / never_fatal counters so they can be
   toggled from signal handlers.
   Currently only i386/x86_64 supported. Change for other platforms:
   the fallback below is a plain, NON-atomic add. */
static void fetch_and_add(signed char* variable, signed char value)
{
#if defined __i386__ || defined __x86_64__
    /* 'lock addb' makes the byte read-modify-write atomic */
    __asm__ volatile("lock; addb %0, %1"
                     : "+r" (value), "+m" (*variable) // input+output
                     : // No input-only
                     : "memory"
    );
#else
    /* NOTE(review): not atomic - racy if used concurrently */
    *variable += value;
#endif
}
|
|
|
|
|
|
|
|
/* enable/disable checking. This can be used in signal handlers.
   'no_check' is *added* to the global 'no_checking' counter, so calls
   must be balanced (e.g. +1 to disable, -1 to re-enable); all checks
   are skipped while the counter is non-zero. */
void __bound_checking (int no_check)
{
    fetch_and_add (&no_checking, no_check);
}
|
|
|
|
|
2020-01-15 10:53:19 +03:00
|
|
|
/* enable/disable "never fatal" mode. This can be used in signal handlers.
   'neverfatal' is added to the global 'never_fatal' counter; while it is
   positive, bound errors are reported but do not terminate the program
   (see bound_error and the INVALID_POINTER returns). */
void __bound_never_fatal (int neverfatal)
{
    fetch_and_add (&never_fatal, neverfatal);
}
|
bcheck cleanup
- revert Makefiles to state before last bcheck additions
Instead, just load bcheck.o explicitly if that is
what is wanted.
- move tcc_add_bcheck() to the <target>-link.c files and
remove recently added arguments. This function is to
support tccelf.c with linking, not for tccgen.c to
support compilation.
- remove -ba option: It said:
"-ba Enable better address checking with bounds checker"
Okay, if it is better then to have it is not an option.
- remove va_copy. It is C99 and we try to stay C89 in tinycc
when possible. For example, MS compilers do not have va_copy.
- win64: revert any 'fixes' to alloca
It was correct as it was before, except for bound_checking
where it was not implemented. This should now work too.
- remove parasitic filename:linenum features
Such feature is already present with rt_printline in
tccrun.c. If it doesn't work it can be fixed.
- revert changes to gen_bounded_ptr_add()
gen_bounded_ptr_add() was working as it should before
(mostly). For the sake of simplicity I switched it to
CDECL. Anyway, FASTCALL means SLOWCALL with tinycc.
In exchange you get one addition which is required for
bounds_checking function arguments. The important thing
is to check them *BEFORE* they are loaded into registers.
New function gbound_args() does that.
In any case, code instrumentation with the bounds-check
functions as such now seems to work flawlessly again,
which means when they are inserted as NOPs, any code that
tcc can compile, seems to behave just the same as without
them.
What these functions then do when fully enabled, is a
different story. I did not touch this.
2019-12-12 17:45:45 +03:00
|
|
|
|
2020-01-18 00:58:39 +03:00
|
|
|
int tcc_backtrace(const char *fmt, ...);
|
|
|
|
|
2002-01-12 19:39:35 +03:00
|
|
|
/* print a bound error message */
/* bound_warning: report through tcc_backtrace (prefix "^bcheck.c^" is
   interpreted by the backtrace machinery, do not change it). */
#define bound_warning(...) \
    tcc_backtrace("^bcheck.c^BCHECK: " __VA_ARGS__)

/* bound_error: warn, then exit(255) unless 'never_fatal' is set. */
#define bound_error(...) \
    do { \
        bound_warning(__VA_ARGS__); \
        if (never_fatal == 0) \
            exit(255); \
    } while (0)
|
2002-01-04 02:12:29 +03:00
|
|
|
|
2020-01-15 10:53:19 +03:00
|
|
|
/* Unrecoverable failure to allocate internal bookkeeping memory:
   report and terminate immediately. */
static void bound_alloc_error(const char *msg)
{
    fprintf(stderr, "FATAL: %s\n", msg);
    exit(1);
}
|
|
|
|
|
|
|
|
/* Report a pointer that was not found in the bounds tree.  Output is
   emitted only when call tracing is enabled (dprintf is conditional);
   'exec' is presumably a program-name prefix set elsewhere - TODO confirm. */
static void bound_not_found_warning(const char *file, const char *function,
                                    void *ptr)
{
    dprintf(stderr, "%s%s, %s(): Not found %p\n", exec, file, function, ptr);
}
|
|
|
|
|
2002-01-05 19:16:47 +03:00
|
|
|
/* return '(p + offset)' for pointer arithmetic (a pointer can reach
   the end of a region in this case).
   On a bounds violation returns INVALID_POINTER (unless 'never_fatal'
   is positive); an unknown non-NULL pointer is passed through with a
   diagnostic. */
void * __bound_ptr_add(void *p, size_t offset)
{
    size_t addr = (size_t)p;

    /* fast path: checking globally disabled */
    if (no_checking)
        return p + offset;

    dprintf(stderr, "%s, %s(): %p 0x%lx\n",
            __FILE__, __FUNCTION__, p, (unsigned long)offset);

    WAIT_SEM ();
    INCR_COUNT(bound_ptr_add_count);
    if (tree) {
        /* first try the current root region */
        addr -= tree->start;
        if (addr >= tree->size) {
            /* miss: splay the tree around 'addr' by region start */
            addr = (size_t)p;
            tree = splay (addr, tree);
            addr -= tree->start;
        }
        if (addr >= tree->size) {
            /* still a miss: retry using the region-end ordering */
            addr = (size_t)p;
            tree = splay_end (addr, tree);
            addr -= tree->start;
        }
        if (addr <= tree->size) {
            /* '<=' (not '<'): a pointer may legally reach one past the
               end of a region for arithmetic; now validate p+offset */
            if (tree->is_invalid || addr + offset > tree->size) {
                POST_SEM ();
                if (print_warn_ptr_add)
                    bound_warning("%p is outside of the region", p + offset);
                if (never_fatal <= 0)
                    return INVALID_POINTER; /* return an invalid pointer */
                return p + offset;
            }
        }
        else if (p) { /* Allow NULL + offset. offsetoff is using it. */
            INCR_COUNT(bound_not_found);
            POST_SEM ();
            bound_not_found_warning (__FILE__, __FUNCTION__, p);
            return p + offset;
        }
    }
    POST_SEM ();
    return p + offset;
}
|
|
|
|
|
2002-01-05 19:16:47 +03:00
|
|
|
/* return '(p + offset)' for pointer indirection (the resulting pointer
   must be strictly inside the region).
   The macro stamps out __bound_ptr_indir1/2/4/8/12/16, one per access
   size 'dsize' in bytes.  Unlike __bound_ptr_add, the whole access
   (offset + dsize bytes) must fit inside the region, and an unknown
   pointer is always reported (no NULL+offset exception).  The splay
   lookup logic mirrors __bound_ptr_add. */
#define BOUND_PTR_INDIR(dsize)                                              \
void * __bound_ptr_indir ## dsize (void *p, size_t offset)                  \
{                                                                           \
    size_t addr = (size_t)p;                                                \
                                                                            \
    if (no_checking)                                                        \
        return p + offset;                                                  \
                                                                            \
    dprintf(stderr, "%s, %s(): %p 0x%lx\n",                                 \
            __FILE__, __FUNCTION__, p, (unsigned long)offset);              \
    WAIT_SEM ();                                                            \
    INCR_COUNT(bound_ptr_indir ## dsize ## _count);                         \
    if (tree) {                                                             \
        addr -= tree->start;                                                \
        if (addr >= tree->size) {                                           \
            addr = (size_t)p;                                               \
            tree = splay (addr, tree);                                      \
            addr -= tree->start;                                            \
        }                                                                   \
        if (addr >= tree->size) {                                           \
            addr = (size_t)p;                                               \
            tree = splay_end (addr, tree);                                  \
            addr -= tree->start;                                            \
        }                                                                   \
        if (addr <= tree->size) {                                           \
            if (tree->is_invalid || addr + offset + dsize > tree->size) {   \
                POST_SEM ();                                                \
                bound_warning("%p is outside of the region", p + offset);   \
                if (never_fatal <= 0)                                       \
                    return INVALID_POINTER; /* return an invalid pointer */ \
                return p + offset;                                          \
            }                                                               \
        }                                                                   \
        else {                                                              \
            INCR_COUNT(bound_not_found);                                    \
            POST_SEM ();                                                    \
            bound_not_found_warning (__FILE__, __FUNCTION__, p);            \
            return p + offset;                                              \
        }                                                                   \
    }                                                                       \
    POST_SEM ();                                                            \
    return p + offset;                                                      \
}

BOUND_PTR_INDIR(1)
BOUND_PTR_INDIR(2)
BOUND_PTR_INDIR(4)
BOUND_PTR_INDIR(8)
BOUND_PTR_INDIR(12)
BOUND_PTR_INDIR(16)
|
|
|
|
|
2016-11-30 08:18:48 +03:00
|
|
|
#if defined(__GNUC__) && (__GNUC__ >= 6)
/*
 * At least gcc 6.2 complains when __builtin_frame_address is used with
 * nonzero argument.
 */
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wframe-address"
#endif

/* return the frame pointer of the caller */
/* NOTE: __builtin_frame_address(1) walks one frame up; GCC documents
   nonzero arguments as potentially unreliable, hence the diagnostic
   suppression above.  The result is used as the key that ties locals,
   alloca regions and setjmp entries to a stack frame. */
#define GET_CALLER_FP(fp)\
{\
    fp = (size_t)__builtin_frame_address(1);\
}
|
|
|
|
|
|
|
|
/* called when entering a function to add all the local regions */
/* 'p1' points to a table of (offset, size) size_t pairs, terminated by
   a zero offset; each local variable's region is registered in the
   splay tree at caller-frame-pointer + offset. */
void FASTCALL __bound_local_new(void *p1)
{
    size_t addr, fp, *p = p1;

    if (no_checking)
        return;
    GET_CALLER_FP(fp);
    dprintf(stderr, "%s, %s(): p1=%p fp=%p\n",
            __FILE__, __FUNCTION__, p, (void *)fp);
    WAIT_SEM ();
    while ((addr = p[0])) {
        INCR_COUNT(bound_local_new_count);
        /* p[0] = frame-relative offset, p[1] = region size */
        tree = splay_insert(addr + fp, p[1], tree);
        p += 2;
    }
    POST_SEM ();
#if BOUND_DEBUG
    /* second pass, outside the lock, purely to trace what was added */
    if (print_calls) {
        p = p1;
        while ((addr = p[0])) {
            dprintf(stderr, "%s, %s(): %p 0x%lx\n",
                    __FILE__, __FUNCTION__,
                    (void *) (addr + fp), (unsigned long) p[1]);
            p += 2;
        }
    }
#endif
}
|
|
|
|
|
|
|
|
/* called when leaving a function to delete all the local regions */
/* 'p1' is the same zero-terminated (offset, size) pair table used by
   __bound_local_new.  Besides removing the locals from the splay tree,
   this also frees any alloca/vla regions and setjmp entries that were
   registered with this frame pointer. */
void FASTCALL __bound_local_delete(void *p1)
{
    size_t addr, fp, *p = p1;

    if (no_checking)
        return;
    GET_CALLER_FP(fp);
    dprintf(stderr, "%s, %s(): p1=%p fp=%p\n",
            __FILE__, __FUNCTION__, p, (void *)fp);
    WAIT_SEM ();
    /* remove every local region of this frame from the tree */
    while ((addr = p[0])) {
        INCR_COUNT(bound_local_delete_count);
        tree = splay_delete(addr + fp, tree);
        p += 2;
    }
    /* drop alloca/vla regions registered for this frame */
    if (alloca_list) {
        alloca_list_type *last = NULL;
        alloca_list_type *cur = alloca_list;

        do {
            if (cur->fp == fp) {
                /* unlink, remove the region, free the node */
                if (last)
                    last->next = cur->next;
                else
                    alloca_list = cur->next;
                tree = splay_delete ((size_t) cur->p, tree);
                dprintf(stderr, "%s, %s(): remove alloca/vla %p\n",
                        __FILE__, __FUNCTION__, cur->p);
                BOUND_FREE (cur);
                cur = last ? last->next : alloca_list;
            }
            else {
                last = cur;
                cur = cur->next;
            }
        } while (cur);
    }
    /* drop setjmp entries registered for this frame */
    if (jmp_list) {
        jmp_list_type *last = NULL;
        jmp_list_type *cur = jmp_list;

        do {
            if (cur->fp == fp) {
                if (last)
                    last->next = cur->next;
                else
                    jmp_list = cur->next;
                dprintf(stderr, "%s, %s(): remove setjmp %p\n",
                        __FILE__, __FUNCTION__, cur->penv);
                BOUND_FREE (cur);
                cur = last ? last->next : jmp_list;
            }
            else {
                last = cur;
                cur = cur->next;
            }
        } while (cur);
    }

    POST_SEM ();
#if BOUND_DEBUG
    /* trace pass, outside the lock */
    if (print_calls) {
        p = p1;
        while ((addr = p[0])) {
            if (addr != 1) {
                dprintf(stderr, "%s, %s(): %p 0x%lx\n",
                        __FILE__, __FUNCTION__,
                        (void *) (addr + fp), (unsigned long) p[1]);
            }
            p+= 2;
        }
    }
#endif
}
|
|
|
|
|
|
|
|
/* used by alloca */
/* Register (or re-register) an alloca/VLA region [p, p+size) belonging to
   the caller's stack frame.  __bound_local_delete / longjmp handling later
   remove the entry when the frame goes away. */
void __bound_new_region(void *p, size_t size)
{
    size_t fp;                  /* caller's frame pointer, keys alloca_list */
    alloca_list_type *last;
    alloca_list_type *cur;
    alloca_list_type *new;

    dprintf(stderr, "%s, %s(): %p, 0x%lx\n",
            __FILE__, __FUNCTION__, p, (unsigned long)size);
    GET_CALLER_FP (fp);
    /* allocate the node before taking the semaphore: BOUND_MALLOC may go
       through the redirected allocator, which takes the semaphore itself */
    new = BOUND_MALLOC (sizeof (alloca_list_type));
    WAIT_SEM ();
    INCR_COUNT(bound_alloca_count);
    /* unlink any previous registration of the same pointer in the same
       frame (e.g. a VLA recreated inside a loop) */
    last = NULL;
    cur = alloca_list;
    while (cur) {
        if (cur->fp == fp && cur->p == p) {
            if (last)
                last->next = cur->next;
            else
                alloca_list = cur->next;
            tree = splay_delete((size_t)p, tree);
            break;
        }
        last = cur;
        cur = cur->next;
    }
    /* record the region bounds unless checking is globally disabled */
    if (no_checking == 0)
        tree = splay_insert((size_t)p, size, tree);
    /* if node allocation failed, the region is still tracked in the tree
       but will not be auto-removed with the frame */
    if (new) {
        new->fp = fp;
        new->p = p;
        new->next = alloca_list;
        alloca_list = new;
    }
    POST_SEM ();
    /* free the stale node outside the critical section */
    if (cur) {
        dprintf(stderr, "%s, %s(): remove alloca/vla %p\n",
                __FILE__, __FUNCTION__, cur->p);
        BOUND_FREE (cur);
    }
}
|
|
|
|
|
2020-05-23 21:02:41 +03:00
|
|
|
/* Record a setjmp() call so that a later longjmp() to the same jmp_buf can
   discard bounds entries for the stack frames being unwound.  One node per
   (jmp_buf, thread); an existing node for the same buffer is refreshed. */
void __bound_setjmp(jmp_buf env)
{
    jmp_list_type *jl;
    void *e = (void *) env;    /* the jmp_buf address identifies the node */

    if (no_checking == 0) {
        dprintf(stderr, "%s, %s(): %p\n", __FILE__, __FUNCTION__, e);
        WAIT_SEM ();
        INCR_COUNT(bound_setjmp_count);
        /* reuse an existing node for this jmp_buf if present */
        jl = jmp_list;
        while (jl) {
            if (jl->penv == e)
                break;
            jl = jl->next;
        }
        if (jl == NULL) {
            jl = BOUND_MALLOC (sizeof (jmp_list_type));
            if (jl) {
                jl->penv = e;
                jl->next = jmp_list;
                jmp_list = jl;
            }
        }
        /* on allocation failure the setjmp is silently not tracked */
        if (jl) {
            size_t fp;

            GET_CALLER_FP (fp);
            jl->fp = fp;
            /* frame range [start_fp at longjmp time .. end_fp] is what
               __bound_long_jump later prunes from the bounds tree */
            jl->end_fp = (size_t)__builtin_frame_address(0);
            jl->tid = BOUND_GET_TID;
        }
        POST_SEM ();
    }
}
|
|
|
|
|
2020-05-25 13:26:55 +03:00
|
|
|
/* Common implementation for __bound_longjmp / __bound_siglongjmp.
   Before performing the real longjmp, drop the bookkeeping for everything
   that the jump unwinds: setjmp records made after the target one (same
   thread), and alloca/VLA bounds entries whose start address lies inside
   the stack range being discarded.  Finally transfers control and does
   not return. */
static void __bound_long_jump(jmp_buf env, int val, int sig, const char *func)
{
    jmp_list_type *jl;
    void *e;
    BOUND_TID_TYPE tid;

    if (no_checking == 0) {
        e = (void *)env;
        tid = BOUND_GET_TID;
        dprintf(stderr, "%s, %s(): %p\n", __FILE__, func, e);
        WAIT_SEM();
        INCR_COUNT(bound_longjmp_count);
        /* find the setjmp record for this jmp_buf in this thread */
        jl = jmp_list;
        while (jl) {
            if (jl->penv == e && jl->tid == tid) {
                /* stack range being unwound: from the current frame up to
                   the frame that called setjmp (recorded as end_fp) */
                size_t start_fp = (size_t)__builtin_frame_address(0);
                size_t end_fp = jl->end_fp;
                jmp_list_type *cur = jmp_list;
                jmp_list_type *last = NULL;

                /* remove all setjmp records of this thread that are more
                   recent than the target one (they head the list) */
                while (cur->penv != e || cur->tid != tid) {
                    if (cur->tid == tid) {
                        dprintf(stderr, "%s, %s(): remove setjmp %p\n",
                                __FILE__, func, cur->penv);
                        if (last)
                            last->next = cur->next;
                        else
                            jmp_list = cur->next;
                        BOUND_FREE (cur);
                        cur = last ? last->next : jmp_list;
                    }
                    else {
                        last = cur;
                        cur = cur->next;
                    }
                }
                /* drop every bounds-tree region whose start address lies
                   within [start_fp, end_fp] — i.e. locals/allocas of the
                   unwound frames — together with its alloca_list node.
                   NOTE: the inner 'last'/'cur' deliberately shadow the
                   jmp_list walkers above. */
                for (;;) {
                    Tree *t = tree;
                    alloca_list_type *last;
                    alloca_list_type *cur;

                    /* binary-search the splay tree for any node in range */
                    while (t && (t->start < start_fp || t->start > end_fp))
                        if (t->start < start_fp)
                            t = t->right;
                        else
                            t = t->left;
                    if (t == NULL)
                        break;
                    /* also unlink a matching alloca_list entry, if any */
                    last = NULL;
                    cur = alloca_list;
                    while (cur) {
                        if ((size_t) cur->p == t->start) {
                            dprintf(stderr, "%s, %s(): remove alloca/vla %p\n",
                                    __FILE__, func, cur->p);
                            if (last)
                                last->next = cur->next;
                            else
                                alloca_list = cur->next;
                            BOUND_FREE (cur);
                            break;
                        }
                        last = cur;
                        cur = cur->next;
                    }
                    dprintf(stderr, "%s, %s(): delete %p\n",
                            __FILE__, func, (void *) t->start);
                    tree = splay_delete(t->start, tree);
                }
                break;
            }
            jl = jl->next;
        }
        POST_SEM();
    }
    /* perform the actual jump; does not return */
#if !defined(_WIN32)
    sig ? siglongjmp(env, val) :
#endif
    longjmp (env, val);
}
|
|
|
|
|
2020-05-25 13:26:55 +03:00
|
|
|
/* Bounds-checking replacement for longjmp(): cleans up bookkeeping for the
   unwound frames, then jumps.  Does not return. */
void __bound_longjmp(jmp_buf env, int val)
{
    __bound_long_jump(env,val, 0, __FUNCTION__);
}
|
|
|
|
|
|
|
|
#if !defined(_WIN32)
/* Bounds-checking replacement for siglongjmp() (POSIX only): same cleanup
   as __bound_longjmp, but restores the signal mask via siglongjmp. */
void __bound_siglongjmp(jmp_buf env, int val)
{
    __bound_long_jump(env,val, 1, __FUNCTION__);
}
#endif
|
|
|
|
|
2020-01-15 10:53:19 +03:00
|
|
|
#if defined(__GNUC__) && (__GNUC__ >= 6)
|
|
|
|
#pragma GCC diagnostic pop
|
|
|
|
#endif
|
|
|
|
|
2020-01-18 00:58:39 +03:00
|
|
|
/* Initialize the bounds checker.  Called (possibly repeatedly) from each
   compiled module's startup code with 'p' pointing to a table of
   (start, size) pairs for that module's static objects, terminated by a
   zero start.  p may be NULL (e.g. when invoked early from the malloc
   redirector).  First call sets up allocator redirection, environment
   flags and well-known libc regions; subsequent calls only add the new
   module's static bounds. */
void __bound_init(size_t *p)
{
    dprintf(stderr, "%s, %s(): start\n", __FILE__, __FUNCTION__);

    /* already initialized: just register this module's static bounds */
    if (inited) {
        WAIT_SEM();
        goto add_bounds;
    }
    inited = 1;

    /* behavior switches come from the environment */
    print_warn_ptr_add = getenv ("TCC_BOUNDS_WARN_POINTER_ADD") != NULL;
    print_calls = getenv ("TCC_BOUNDS_PRINT_CALLS") != NULL;
    print_heap = getenv ("TCC_BOUNDS_PRINT_HEAP") != NULL;
    print_statistic = getenv ("TCC_BOUNDS_PRINT_STATISTIC") != NULL;
    never_fatal = getenv ("TCC_BOUNDS_NEVER_FATAL") != NULL;

    INIT_SEM ();

#if MALLOC_REDIR
    /* resolve the real allocator entry points that our malloc/free/...
       wrappers forward to */
    {
        void *addr = RTLD_NEXT;

        /* tcc -run required RTLD_DEFAULT. Normal usage requires RTLD_NEXT */
        *(void **) (&malloc_redir) = dlsym (addr, "malloc");
        if (malloc_redir == NULL) {
            dprintf(stderr, "%s, %s(): use RTLD_DEFAULT\n",
                    __FILE__, __FUNCTION__);
            addr = RTLD_DEFAULT;
            *(void **) (&malloc_redir) = dlsym (addr, "malloc");
        }
        *(void **) (&calloc_redir) = dlsym (addr, "calloc");
        *(void **) (&free_redir) = dlsym (addr, "free");
        *(void **) (&realloc_redir) = dlsym (addr, "realloc");
        *(void **) (&memalign_redir) = dlsym (addr, "memalign");
        dprintf(stderr, "%s, %s(): malloc_redir %p\n",
                __FILE__, __FUNCTION__, malloc_redir);
        dprintf(stderr, "%s, %s(): free_redir %p\n",
                __FILE__, __FUNCTION__, free_redir);
        dprintf(stderr, "%s, %s(): realloc_redir %p\n",
                __FILE__, __FUNCTION__, realloc_redir);
        dprintf(stderr, "%s, %s(): memalign_redir %p\n",
                __FILE__, __FUNCTION__, memalign_redir);
        if (malloc_redir == NULL || free_redir == NULL)
            bound_alloc_error ("Cannot redirect malloc/free");
#if HAVE_PTHREAD_CREATE
        *(void **) (&pthread_create_redir) = dlsym (addr, "pthread_create");
        dprintf(stderr, "%s, %s(): pthread_create_redir %p\n",
                __FILE__, __FUNCTION__, pthread_create_redir);
#endif
    }
#endif

#ifdef __linux__
    {
        FILE *fp;
        unsigned char found;
        unsigned long long start;
        unsigned long long end;
        unsigned long long ad =
            (unsigned long long) __builtin_return_address(0);
        char line[1000];

        /* Display exec name. Useful when a lot of code is compiled with tcc */
        /* NOTE(review): fread return value is unchecked; exec stays
           zero-filled if the read fails, which appears intentional */
        fp = fopen ("/proc/self/comm", "r");
        if (fp) {
            memset (exec, 0, sizeof(exec));
            fread (exec, 1, sizeof(exec) - 2, fp);
            if (strchr(exec,'\n'))
                *strchr(exec,'\n') = '\0';
            strcat (exec, ":");
            fclose (fp);
        }
        /* check if dlopen is used (is there a better way?) — if our return
           address lies in a mapping before [heap], we were dlopen'ed */
        found = 0;
        fp = fopen ("/proc/self/maps", "r");
        if (fp) {
            while (fgets (line, sizeof(line), fp)) {
                if (sscanf (line, "%Lx-%Lx", &start, &end) == 2 &&
                    ad >= start && ad < end) {
                    found = 1;
                    break;
                }
                if (strstr (line,"[heap]"))
                    break;
            }
            fclose (fp);
        }
        if (found == 0) {
            use_sem = 1;
            no_strdup = 1;
        }
    }
#endif

    WAIT_SEM ();

#if HAVE_CTYPE
    /* XXX: Does not work if locale is changed */
    /* register glibc's ctype lookup tables so that code indexing them
       (isalpha etc.) passes the bounds check */
    tree = splay_insert((size_t) __ctype_b_loc(),
                        sizeof (unsigned short *), tree);
    tree = splay_insert((size_t) (*__ctype_b_loc() - 128),
                        384 * sizeof (unsigned short), tree);
    tree = splay_insert((size_t) __ctype_tolower_loc(),
                        sizeof (__int32_t *), tree);
    tree = splay_insert((size_t) (*__ctype_tolower_loc() - 128),
                        384 * sizeof (__int32_t), tree);
    tree = splay_insert((size_t) __ctype_toupper_loc(),
                        sizeof (__int32_t *), tree);
    tree = splay_insert((size_t) (*__ctype_toupper_loc() - 128),
                        384 * sizeof (__int32_t), tree);
#endif
#if HAVE_ERRNO
    tree = splay_insert((size_t) (&errno), sizeof (int), tree);
#endif

add_bounds:
    if (!p)
        goto no_bounds;

    /* add all static bound check values */
    while (p[0] != 0) {
        tree = splay_insert(p[0], p[1], tree);
#if BOUND_DEBUG
        if (print_calls) {
            dprintf(stderr, "%s, %s(): static var %p 0x%lx\n",
                    __FILE__, __FUNCTION__,
                    (void *) p[0], (unsigned long) p[1]);
        }
#endif
        p += 2;
    }
no_bounds:

    POST_SEM ();
    /* checking is enabled only after everything is registered */
    no_checking = 0;
    dprintf(stderr, "%s, %s(): end\n\n", __FILE__, __FUNCTION__);
}
|
|
|
|
|
2019-12-10 10:07:25 +03:00
|
|
|
/* Register main()'s argv strings, the argv vector itself, and (when
   HAS_ENVIRON) the environment strings and vector, so that ordinary
   argv/environ accesses pass the bounds check. */
void __bound_main_arg(char **p)
{
    char *start = (char *) p;   /* base of the argv pointer array */

    WAIT_SEM ();
    /* one region per argument string (including its NUL) */
    while (*p) {
        tree = splay_insert((size_t) *p, strlen (*p) + 1, tree);
        ++p;
    }
    /* the argv array itself, up to (but not including) the NULL slot */
    tree = splay_insert((size_t) start, (char *) p - start, tree);
    POST_SEM ();
#if BOUND_DEBUG
    if (print_calls) {
        p = (char **) start;
        while (*p) {
            dprintf(stderr, "%s, %s(): %p 0x%lx\n",
                    __FILE__, __FUNCTION__,
                    *p, (unsigned long)(strlen (*p) + 1));
            ++p;
        }
        dprintf(stderr, "%s, %s(): argv %p 0x%lx\n",
                __FILE__, __FUNCTION__,
                start, (unsigned long)((char *) p - start));
    }
#endif

#if HAS_ENVIRON
    /* same treatment for the environment block */
    {
        extern char **environ;

        WAIT_SEM ();
        p = environ;
        start = (char *) p;
        while (*p) {
            tree = splay_insert((size_t) *p, strlen (*p) + 1, tree);
            ++p;
        }
        tree = splay_insert((size_t) start, (char *) p - start, tree);
        POST_SEM ();
#if BOUND_DEBUG
        if (print_calls) {
            p = environ;
            while (*p) {
                dprintf(stderr, "%s, %s(): %p 0x%lx\n",
                        __FILE__, __FUNCTION__,
                        *p, (unsigned long)(strlen (*p) + 1));
                ++p;
            }
            dprintf(stderr, "%s, %s(): environ %p 0x%lx\n",
                    __FILE__, __FUNCTION__,
                    start, (unsigned long)((char *) p - start));
        }
#endif
    }
#endif
}
|
|
|
|
|
2019-12-10 21:47:33 +03:00
|
|
|
/* Process-exit destructor: report leaked heap regions (when requested),
   release all checker bookkeeping (alloca list, setjmp list, quarantine
   slots, splay tree, tree node free-list) and tear down the semaphore.
   Optionally prints call-count statistics. */
void __attribute__((destructor)) __bound_exit(void)
{
    int i;
    /* indexed by tree->type (TCC_TYPE_*) for the leak report */
    static const char * const alloc_type[] = {
        "", "malloc", "calloc", "realloc", "memalign", "strdup"
    };

    dprintf(stderr, "%s, %s():\n", __FILE__, __FUNCTION__);

    if (inited) {
#if !defined(_WIN32)
        /* let glibc release its internal caches so they are not reported
           as leaks */
        if (print_heap) {
            extern void __libc_freeres (void);
            __libc_freeres ();
        }
#endif

        /* disable checking before dismantling the data structures */
        no_checking = 1;

        WAIT_SEM ();
        while (alloca_list) {
            alloca_list_type *next = alloca_list->next;

            tree = splay_delete ((size_t) alloca_list->p, tree);
            BOUND_FREE (alloca_list);
            alloca_list = next;
        }
        while (jmp_list) {
            jmp_list_type *next = jmp_list->next;

            BOUND_FREE (jmp_list);
            jmp_list = next;
        }
        /* flush the free() quarantine */
        for (i = 0; i < FREE_REUSE_SIZE; i++) {
            if (free_reuse_list[i]) {
                tree = splay_delete ((size_t) free_reuse_list[i], tree);
                BOUND_FREE (free_reuse_list[i]);
            }
        }
        /* whatever is left in the tree with a heap type was leaked */
        while (tree) {
            if (print_heap && tree->type != 0)
                fprintf (stderr, "%s, %s(): %s found size %lu\n",
                         __FILE__, __FUNCTION__, alloc_type[tree->type],
                         (unsigned long) tree->size);
            tree = splay_delete (tree->start, tree);
        }
#if TREE_REUSE
        /* recycled tree nodes are chained through their 'left' pointer */
        while (tree_free_list) {
            Tree *next = tree_free_list->left;
            BOUND_FREE (tree_free_list);
            tree_free_list = next;
        }
#endif
        POST_SEM ();
        EXIT_SEM ();
        inited = 0;
        if (print_statistic) {
#if BOUND_STATISTIC
            fprintf (stderr, "bound_ptr_add_count      %llu\n", bound_ptr_add_count);
            fprintf (stderr, "bound_ptr_indir1_count   %llu\n", bound_ptr_indir1_count);
            fprintf (stderr, "bound_ptr_indir2_count   %llu\n", bound_ptr_indir2_count);
            fprintf (stderr, "bound_ptr_indir4_count   %llu\n", bound_ptr_indir4_count);
            fprintf (stderr, "bound_ptr_indir8_count   %llu\n", bound_ptr_indir8_count);
            fprintf (stderr, "bound_ptr_indir12_count  %llu\n", bound_ptr_indir12_count);
            fprintf (stderr, "bound_ptr_indir16_count  %llu\n", bound_ptr_indir16_count);
            fprintf (stderr, "bound_local_new_count    %llu\n", bound_local_new_count);
            fprintf (stderr, "bound_local_delete_count %llu\n", bound_local_delete_count);
            fprintf (stderr, "bound_malloc_count       %llu\n", bound_malloc_count);
            fprintf (stderr, "bound_calloc_count       %llu\n", bound_calloc_count);
            fprintf (stderr, "bound_realloc_count      %llu\n", bound_realloc_count);
            fprintf (stderr, "bound_free_count         %llu\n", bound_free_count);
            fprintf (stderr, "bound_memalign_count     %llu\n", bound_memalign_count);
            fprintf (stderr, "bound_mmap_count         %llu\n", bound_mmap_count);
            fprintf (stderr, "bound_munmap_count       %llu\n", bound_munmap_count);
            fprintf (stderr, "bound_alloca_count       %llu\n", bound_alloca_count);
            fprintf (stderr, "bound_setjmp_count       %llu\n", bound_setjmp_count);
            fprintf (stderr, "bound_longjmp_count      %llu\n", bound_longjmp_count);
            fprintf (stderr, "bound_mempcy_count       %llu\n", bound_mempcy_count);
            fprintf (stderr, "bound_memcmp_count       %llu\n", bound_memcmp_count);
            fprintf (stderr, "bound_memmove_count      %llu\n", bound_memmove_count);
            fprintf (stderr, "bound_memset_count       %llu\n", bound_memset_count);
            fprintf (stderr, "bound_strlen_count       %llu\n", bound_strlen_count);
            fprintf (stderr, "bound_strcpy_count       %llu\n", bound_strcpy_count);
            fprintf (stderr, "bound_strncpy_count      %llu\n", bound_strncpy_count);
            fprintf (stderr, "bound_strcmp_count       %llu\n", bound_strcmp_count);
            fprintf (stderr, "bound_strncmp_count      %llu\n", bound_strncmp_count);
            fprintf (stderr, "bound_strcat_count       %llu\n", bound_strcat_count);
            fprintf (stderr, "bound_strchr_count       %llu\n", bound_strchr_count);
            fprintf (stderr, "bound_strdup_count       %llu\n", bound_strdup_count);
            fprintf (stderr, "bound_not_found          %llu\n", bound_not_found);
#endif
#if BOUND_STATISTIC_SPLAY
            fprintf (stderr, "bound_splay              %llu\n", bound_splay);
            fprintf (stderr, "bound_splay_end          %llu\n", bound_splay_end);
            fprintf (stderr, "bound_splay_insert       %llu\n", bound_splay_insert);
            fprintf (stderr, "bound_splay_delete       %llu\n", bound_splay_delete);
#endif
        }
    }
}
|
|
|
|
|
2020-01-15 10:53:19 +03:00
|
|
|
#if HAVE_PTHREAD_CREATE
|
|
|
|
int pthread_create(pthread_t *thread, const pthread_attr_t *attr,
|
|
|
|
void *(*start_routine) (void *), void *arg)
|
|
|
|
{
|
|
|
|
use_sem = 1;
|
2020-05-05 09:31:57 +03:00
|
|
|
dprintf (stderr, "%s, %s()\n", __FILE__, __FUNCTION__);
|
2020-01-15 10:53:19 +03:00
|
|
|
return pthread_create_redir(thread, attr, start_routine, arg);
|
|
|
|
}
|
|
|
|
#endif
|
|
|
|
|
2019-12-10 10:07:25 +03:00
|
|
|
/* Bounds-tracked malloc.  With MALLOC_REDIR it interposes the libc symbol
   and forwards to the real allocator; otherwise it is called explicitly
   as __bound_malloc.  Successful allocations are recorded in the splay
   tree as TCC_TYPE_MALLOC regions. */
#if MALLOC_REDIR
void *malloc(size_t size)
#else
void *__bound_malloc(size_t size, const void *caller)
#endif
{
    void *ptr;

#if MALLOC_REDIR
    /* This will catch the first dlsym call from __bound_init */
    if (malloc_redir == NULL) {
        __bound_init (0);
        /* dlsym itself may call malloc before redirection is resolved:
           serve those requests from a small static pool (never freed) */
        if (malloc_redir == NULL) {
            ptr = &initial_pool[pool_index];
            pool_index = (pool_index + size + 15) & ~15; /* 16-byte align */
            if (pool_index >= sizeof (initial_pool))
                bound_alloc_error ("initial memory pool too small");
            dprintf (stderr, "%s, %s(): initial %p, 0x%lx\n",
                     __FILE__, __FUNCTION__, ptr, (unsigned long)size);
            return ptr;
        }
    }
#endif
    /* we allocate one more byte to ensure the regions will be
       separated by at least one byte. With the glibc malloc, it may
       be in fact not necessary */
    ptr = BOUND_MALLOC (size + 1);
    dprintf(stderr, "%s, %s(): %p, 0x%lx\n",
            __FILE__, __FUNCTION__, ptr, (unsigned long)size);

    if (no_checking == 0) {
        WAIT_SEM ();
        INCR_COUNT(bound_malloc_count);

        if (ptr) {
            /* record the user-visible size, not the padded one */
            tree = splay_insert ((size_t) ptr, size, tree);
            if (tree && tree->start == (size_t) ptr)
                tree->type = TCC_TYPE_MALLOC;
        }
        POST_SEM ();
    }
    return ptr;
}
|
|
|
|
|
2019-12-10 10:07:25 +03:00
|
|
|
/* Bounds-tracked memalign.  Falls back to plain malloc when the platform
   has no memalign, in which case alignments above 4 are not supported. */
#if MALLOC_REDIR
void *memalign(size_t size, size_t align)
#else
void *__bound_memalign(size_t size, size_t align, const void *caller)
#endif
{
    void *ptr;

#if HAVE_MEMALIGN
    /* we allocate one more byte to ensure the regions will be
       separated by at least one byte. With the glibc malloc, it may
       be in fact not necessary */
    ptr = BOUND_MEMALIGN(size + 1, align);
#else
    if (align > 4) {
        /* XXX: handle it ? */
        ptr = NULL;
    } else {
        /* we suppose that malloc aligns to at least four bytes */
        ptr = BOUND_MALLOC(size + 1);
    }
#endif
    dprintf(stderr, "%s, %s(): %p, 0x%lx\n",
            __FILE__, __FUNCTION__, ptr, (unsigned long)size);

    if (no_checking == 0) {
        WAIT_SEM ();
        INCR_COUNT(bound_memalign_count);

        if (ptr) {
            tree = splay_insert((size_t) ptr, size, tree);
            if (tree && tree->start == (size_t) ptr)
                tree->type = TCC_TYPE_MEMALIGN;
        }
        POST_SEM ();
    }
    return ptr;
}
|
|
|
|
|
2019-12-10 10:07:25 +03:00
|
|
|
/* Bounds-tracked free.  Known regions are poisoned (0x5a) and parked in a
   small quarantine ring (free_reuse_list) to catch use-after-free; the
   oldest quarantined block is the one actually released.  Double frees of
   a tracked region raise "freeing invalid region". */
#if MALLOC_REDIR
void *free(void *ptr)   /* NOTE: signature as in original interposer */
#else
void __bound_free(void *ptr, const void *caller)
#endif
{
    size_t addr = (size_t) ptr;
    void *p;

    /* nothing to do for NULL, before init, while checking is off, or for
       blocks carved from the static bootstrap pool (never freed) */
    if (ptr == NULL || tree == NULL || no_checking
#if MALLOC_REDIR
        || ((unsigned char *) ptr >= &initial_pool[0] &&
            (unsigned char *) ptr < &initial_pool[sizeof(initial_pool)])
#endif
        )
        return;

    dprintf(stderr, "%s, %s(): %p\n", __FILE__, __FUNCTION__, ptr);

    WAIT_SEM ();
    INCR_COUNT(bound_free_count);
    tree = splay (addr, tree);
    if (tree->start == addr) {
        if (tree->is_invalid) {
            /* region already freed: report and bail out */
            POST_SEM ();
            bound_error("freeing invalid region");
            return;
        }
        tree->is_invalid = 1;
        memset (ptr, 0x5a, tree->size);
        /* rotate through the quarantine: park ptr, release the evictee */
        p = free_reuse_list[free_reuse_index];
        free_reuse_list[free_reuse_index] = ptr;
        free_reuse_index = (free_reuse_index + 1) % FREE_REUSE_SIZE;
        if (p)
            tree = splay_delete((size_t)p, tree);
        ptr = p;    /* may be NULL: then nothing is released below */
    }
    /* untracked pointers fall through and are released directly */
    POST_SEM ();
    BOUND_FREE (ptr);
}
|
|
|
|
|
2019-12-10 10:07:25 +03:00
|
|
|
#if MALLOC_REDIR
|
|
|
|
void *realloc(void *ptr, size_t size)
|
|
|
|
#else
|
2002-01-05 03:41:11 +03:00
|
|
|
void *__bound_realloc(void *ptr, size_t size, const void *caller)
|
2019-12-10 10:07:25 +03:00
|
|
|
#endif
|
2002-01-04 02:12:29 +03:00
|
|
|
{
|
2020-01-15 10:53:19 +03:00
|
|
|
void *new_ptr;
|
|
|
|
|
2002-01-04 02:12:29 +03:00
|
|
|
if (size == 0) {
|
2019-12-10 10:07:25 +03:00
|
|
|
#if MALLOC_REDIR
|
|
|
|
free(ptr);
|
|
|
|
#else
|
2002-01-05 03:41:11 +03:00
|
|
|
__bound_free(ptr, caller);
|
2019-12-10 10:07:25 +03:00
|
|
|
#endif
|
2002-01-04 02:12:29 +03:00
|
|
|
return NULL;
|
2019-12-10 21:47:33 +03:00
|
|
|
}
|
2020-01-15 10:53:19 +03:00
|
|
|
|
2020-05-23 21:02:41 +03:00
|
|
|
new_ptr = BOUND_REALLOC (ptr, size);
|
2020-01-15 10:53:19 +03:00
|
|
|
dprintf(stderr, "%s, %s(): %p, 0x%lx\n",
|
|
|
|
__FILE__, __FUNCTION__, new_ptr, (unsigned long)size);
|
|
|
|
|
|
|
|
if (no_checking == 0) {
|
2019-12-10 21:47:33 +03:00
|
|
|
WAIT_SEM ();
|
2019-12-12 22:49:35 +03:00
|
|
|
INCR_COUNT(bound_realloc_count);
|
2020-01-15 10:53:19 +03:00
|
|
|
|
|
|
|
if (ptr)
|
|
|
|
tree = splay_delete ((size_t) ptr, tree);
|
|
|
|
if (new_ptr) {
|
|
|
|
tree = splay_insert ((size_t) new_ptr, size, tree);
|
|
|
|
if (tree && tree->start == (size_t) new_ptr)
|
2019-12-10 21:47:33 +03:00
|
|
|
tree->type = TCC_TYPE_REALLOC;
|
2019-12-10 10:07:25 +03:00
|
|
|
}
|
2019-12-10 21:47:33 +03:00
|
|
|
POST_SEM ();
|
2002-01-04 02:12:29 +03:00
|
|
|
}
|
2020-01-15 10:53:19 +03:00
|
|
|
return new_ptr;
|
2002-01-04 02:12:29 +03:00
|
|
|
}
|
|
|
|
|
2019-12-10 10:07:25 +03:00
|
|
|
#if MALLOC_REDIR
|
|
|
|
void *calloc(size_t nmemb, size_t size)
|
|
|
|
#else
|
2002-03-04 01:45:55 +03:00
|
|
|
void *__bound_calloc(size_t nmemb, size_t size)
|
2019-12-10 10:07:25 +03:00
|
|
|
#endif
|
2002-03-04 01:45:55 +03:00
|
|
|
{
|
|
|
|
void *ptr;
|
2019-12-10 10:07:25 +03:00
|
|
|
|
|
|
|
size *= nmemb;
|
|
|
|
#if MALLOC_REDIR
|
2019-12-10 21:47:33 +03:00
|
|
|
/* This will catch the first dlsym call from __bound_init */
|
|
|
|
if (malloc_redir == NULL) {
|
2020-01-18 00:58:39 +03:00
|
|
|
__bound_init (0);
|
2020-01-15 10:53:19 +03:00
|
|
|
if (malloc_redir == NULL) {
|
|
|
|
ptr = &initial_pool[pool_index];
|
|
|
|
pool_index = (pool_index + size + 15) & ~15;
|
|
|
|
if (pool_index >= sizeof (initial_pool))
|
|
|
|
bound_alloc_error ("initial memory pool too small");
|
|
|
|
dprintf (stderr, "%s, %s(): initial %p, 0x%lx\n",
|
|
|
|
__FILE__, __FUNCTION__, ptr, (unsigned long)size);
|
|
|
|
memset (ptr, 0, size);
|
|
|
|
return ptr;
|
|
|
|
}
|
2019-12-10 21:47:33 +03:00
|
|
|
}
|
|
|
|
#endif
|
2020-05-23 21:02:41 +03:00
|
|
|
ptr = BOUND_MALLOC(size + 1);
|
2020-01-15 10:53:19 +03:00
|
|
|
dprintf (stderr, "%s, %s(): %p, 0x%lx\n",
|
|
|
|
__FILE__, __FUNCTION__, ptr, (unsigned long)size);
|
|
|
|
|
2019-12-10 10:07:25 +03:00
|
|
|
if (ptr) {
|
|
|
|
memset (ptr, 0, size);
|
2020-01-15 10:53:19 +03:00
|
|
|
if (no_checking == 0) {
|
|
|
|
WAIT_SEM ();
|
|
|
|
INCR_COUNT(bound_calloc_count);
|
|
|
|
tree = splay_insert ((size_t) ptr, size, tree);
|
|
|
|
if (tree && tree->start == (size_t) ptr)
|
|
|
|
tree->type = TCC_TYPE_CALLOC;
|
|
|
|
POST_SEM ();
|
2019-12-10 21:47:33 +03:00
|
|
|
}
|
2019-12-10 10:07:25 +03:00
|
|
|
}
|
2002-03-04 01:45:55 +03:00
|
|
|
return ptr;
|
|
|
|
}
|
2019-12-10 10:07:25 +03:00
|
|
|
|
|
|
|
#if !defined(_WIN32)
/* Bounds-tracked mmap wrapper: performs the real mmap and, on success,
   records the mapped range in the splay tree. */
void *__bound_mmap (void *start, size_t size, int prot,
                    int flags, int fd, off_t offset)
{
    void *result;

    dprintf(stderr, "%s, %s(): %p, 0x%lx\n",
            __FILE__, __FUNCTION__, start, (unsigned long)size);
    result = mmap (start, size, prot, flags, fd, offset);
    /* NOTE(review): failure is MAP_FAILED ((void *)-1), not NULL, so a
       failed mapping is still non-NULL here and would be inserted —
       appears to rely on callers/mmap semantics; verify upstream */
    if (result && no_checking == 0) {
        WAIT_SEM ();
        INCR_COUNT(bound_mmap_count);
        tree = splay_insert((size_t)result, size, tree);
        POST_SEM ();
    }
    return result;
}
|
|
|
|
|
|
|
|
/* Bounds-tracked munmap wrapper: drops the region from the splay tree
   before releasing the mapping. */
int __bound_munmap (void *start, size_t size)
{
    int result;

    dprintf(stderr, "%s, %s(): %p, 0x%lx\n",
            __FILE__, __FUNCTION__, start, (unsigned long)size);
    if (start && no_checking == 0) {
        WAIT_SEM ();
        INCR_COUNT(bound_munmap_count);
        tree = splay_delete ((size_t) start, tree);
        POST_SEM ();
    }
    result = munmap (start, size);
    return result;
}
|
2002-03-04 01:45:55 +03:00
|
|
|
#endif
|
|
|
|
|
2002-01-05 20:03:56 +03:00
|
|
|
/* some useful checked functions */
|
|
|
|
|
|
|
|
/* check that (p ... p + size - 1) lies inside 'p' region, if any */
|
2019-12-13 15:45:09 +03:00
|
|
|
static void __bound_check(const void *p, size_t size, const char *function)
|
2002-01-05 20:03:56 +03:00
|
|
|
{
|
2020-01-15 10:53:19 +03:00
|
|
|
if (no_checking == 0 && size != 0 &&
|
|
|
|
__bound_ptr_add((void *)p, size) == INVALID_POINTER) {
|
2020-01-18 00:58:39 +03:00
|
|
|
bound_error("invalid pointer %p, size 0x%lx in %s",
|
2020-01-15 10:53:19 +03:00
|
|
|
p, (unsigned long)size, function);
|
|
|
|
}
|
2002-01-05 20:03:56 +03:00
|
|
|
}
|
|
|
|
|
2020-01-15 10:53:19 +03:00
|
|
|
/* Detect overlap between [p1, p1+n1) and [p2, p2+n2).  On overlap an
   error is reported; the return value is non-zero only when never_fatal
   is negative, telling the caller to skip the real operation.  Empty
   ranges never overlap, and nothing is checked while checking is off. */
static int check_overlap (const void *p1, size_t n1,
                          const void *p2, size_t n2,
                          const char *function)
{
    const void *p1e = (const void *) ((const char *) p1 + n1);
    const void *p2e = (const void *) ((const char *) p2 + n2);
    int overlapping;

    if (no_checking || n1 == 0 || n2 == 0)
        return 0;

    /* either range starts inside the other */
    overlapping = (p1 <= p2 && p1e > p2) ||   /* p1----p2====p1e----p2e */
                  (p2 <= p1 && p2e > p1);     /* p2----p1====p2e----p1e */
    if (!overlapping)
        return 0;

    bound_error("overlapping regions %p(0x%lx), %p(0x%lx) in %s",
                p1, (unsigned long)n1, p2, (unsigned long)n2, function);
    return never_fatal < 0;
}
|
2015-04-10 15:17:22 +03:00
|
|
|
|
2020-01-15 10:53:19 +03:00
|
|
|
/* Checked memcpy: validates both ranges and rejects overlapping regions
   (memcpy with overlap is UB).  When overlap is detected and never_fatal
   is negative the copy is skipped and dest returned unchanged. */
void *__bound_memcpy(void *dest, const void *src, size_t n)
{
    dprintf(stderr, "%s, %s(): %p, %p, 0x%lx\n",
            __FILE__, __FUNCTION__, dest, src, (unsigned long)n);
    INCR_COUNT(bound_mempcy_count);
    __bound_check(dest, n, "memcpy dest");
    __bound_check(src, n, "memcpy src");
    if (check_overlap(dest, n, src, n, "memcpy"))
        return dest;
    return memcpy(dest, src, n);
}
|
|
|
|
|
2020-01-15 10:53:19 +03:00
|
|
|
/* Checked memcmp.  The comparison runs first and the bounds are then
   checked against the number of bytes actually examined (u1/u2 minus the
   base), so a mismatch before the end of a region does not flag bytes
   that were never touched. */
int __bound_memcmp(const void *s1, const void *s2, size_t n)
{
    const unsigned char *u1 = (const unsigned char *) s1;
    const unsigned char *u2 = (const unsigned char *) s2;
    int retval = 0;

    dprintf(stderr, "%s, %s(): %p, %p, 0x%lx\n",
            __FILE__, __FUNCTION__, s1, s2, (unsigned long)n);
    INCR_COUNT(bound_memcmp_count);
    for (;;) {
        /* n==0 wraps to SIZE_MAX, which reads as -1 after the cast */
        if ((ssize_t) --n == -1)
            break;
        else if (*u1 != *u2) {
            /* post-increment so u1/u2 count the mismatching byte too */
            retval = *u1++ - *u2++;
            break;
        }
        ++u1;
        ++u2;
    }
    __bound_check(s1, (const void *)u1 - s1, "memcmp s1");
    __bound_check(s2, (const void *)u2 - s2, "memcmp s2");
    return retval;
}
|
|
|
|
|
2020-01-15 10:53:19 +03:00
|
|
|
void *__bound_memmove(void *dest, const void *src, size_t n)
|
2002-01-05 20:03:56 +03:00
|
|
|
{
|
2020-01-15 10:53:19 +03:00
|
|
|
dprintf(stderr, "%s, %s(): %p, %p, 0x%lx\n",
|
|
|
|
__FILE__, __FUNCTION__, dest, src, (unsigned long)n);
|
2019-12-12 22:49:35 +03:00
|
|
|
INCR_COUNT(bound_memmove_count);
|
2020-01-15 10:53:19 +03:00
|
|
|
__bound_check(dest, n, "memmove dest");
|
|
|
|
__bound_check(src, n, "memmove src");
|
|
|
|
return memmove(dest, src, n);
|
2002-01-05 20:03:56 +03:00
|
|
|
}
|
|
|
|
|
2020-01-15 10:53:19 +03:00
|
|
|
void *__bound_memset(void *s, int c, size_t n)
|
2002-01-05 20:03:56 +03:00
|
|
|
{
|
2020-01-15 10:53:19 +03:00
|
|
|
dprintf(stderr, "%s, %s(): %p, %d, 0x%lx\n",
|
|
|
|
__FILE__, __FUNCTION__, s, c, (unsigned long)n);
|
2019-12-12 22:49:35 +03:00
|
|
|
INCR_COUNT(bound_memset_count);
|
2020-01-15 10:53:19 +03:00
|
|
|
__bound_check(s, n, "memset");
|
|
|
|
return memset(s, c, n);
|
2002-01-05 20:03:56 +03:00
|
|
|
}
|
|
|
|
|
2002-01-26 21:53:47 +03:00
|
|
|
int __bound_strlen(const char *s)
|
|
|
|
{
|
2019-12-12 22:49:35 +03:00
|
|
|
const char *p = s;
|
2002-01-26 21:53:47 +03:00
|
|
|
|
2020-01-15 10:53:19 +03:00
|
|
|
dprintf(stderr, "%s, %s(): %p\n",
|
|
|
|
__FILE__, __FUNCTION__, s);
|
2019-12-12 22:49:35 +03:00
|
|
|
INCR_COUNT(bound_strlen_count);
|
|
|
|
while (*p++);
|
2020-01-15 10:53:19 +03:00
|
|
|
__bound_check(s, p - s, "strlen");
|
|
|
|
return (p - s) - 1;
|
2002-01-26 21:53:47 +03:00
|
|
|
}
|
|
|
|
|
2020-01-15 10:53:19 +03:00
|
|
|
char *__bound_strcpy(char *dest, const char *src)
|
2002-01-26 21:53:47 +03:00
|
|
|
{
|
2015-04-10 15:17:22 +03:00
|
|
|
size_t len;
|
2020-01-15 10:53:19 +03:00
|
|
|
const char *p = src;
|
2015-04-10 15:17:22 +03:00
|
|
|
|
2020-01-15 10:53:19 +03:00
|
|
|
dprintf(stderr, "%s, %s(): %p, %p\n",
|
|
|
|
__FILE__, __FUNCTION__, dest, src);
|
2019-12-12 22:49:35 +03:00
|
|
|
INCR_COUNT(bound_strcpy_count);
|
2020-01-15 10:53:19 +03:00
|
|
|
while (*p++);
|
|
|
|
len = p - src;
|
|
|
|
__bound_check(dest, len, "strcpy dest");
|
|
|
|
__bound_check(src, len, "strcpy src");
|
|
|
|
if (check_overlap(dest, len, src, len, "strcpy"))
|
|
|
|
return dest;
|
|
|
|
return strcpy (dest, src);
|
2002-01-26 21:53:47 +03:00
|
|
|
}
|
2019-12-10 10:07:25 +03:00
|
|
|
|
2020-01-15 10:53:19 +03:00
|
|
|
char *__bound_strncpy(char *dest, const char *src, size_t n)
|
2019-12-12 22:49:35 +03:00
|
|
|
{
|
2019-12-13 12:02:20 +03:00
|
|
|
size_t len = n;
|
|
|
|
const char *p = src;
|
2019-12-12 22:49:35 +03:00
|
|
|
|
2020-01-15 10:53:19 +03:00
|
|
|
dprintf(stderr, "%s, %s(): %p, %p, 0x%lx\n",
|
|
|
|
__FILE__, __FUNCTION__, dest, src, (unsigned long)n);
|
|
|
|
INCR_COUNT(bound_strncpy_count);
|
2019-12-13 12:02:20 +03:00
|
|
|
while (len-- && *p++);
|
|
|
|
len = p - src;
|
2020-01-15 10:53:19 +03:00
|
|
|
__bound_check(dest, len, "strncpy dest");
|
|
|
|
__bound_check(src, len, "strncpy src");
|
|
|
|
if (check_overlap(dest, len, src, len, "strncpy"))
|
|
|
|
return dest;
|
|
|
|
return strncpy(dest, src, n);
|
2019-12-12 22:49:35 +03:00
|
|
|
}
|
|
|
|
|
|
|
|
int __bound_strcmp(const char *s1, const char *s2)
|
|
|
|
{
|
|
|
|
const unsigned char *u1 = (const unsigned char *) s1;
|
|
|
|
const unsigned char *u2 = (const unsigned char *) s2;
|
|
|
|
|
2020-01-15 10:53:19 +03:00
|
|
|
dprintf(stderr, "%s, %s(): %p, %p\n",
|
|
|
|
__FILE__, __FUNCTION__, s1, s2);
|
2019-12-12 22:49:35 +03:00
|
|
|
INCR_COUNT(bound_strcmp_count);
|
|
|
|
while (*u1 && *u1 == *u2) {
|
2020-01-15 10:53:19 +03:00
|
|
|
++u1;
|
|
|
|
++u2;
|
2019-12-12 22:49:35 +03:00
|
|
|
}
|
2020-01-15 10:53:19 +03:00
|
|
|
__bound_check(s1, ((const char *)u1 - s1) + 1, "strcmp s1");
|
|
|
|
__bound_check(s2, ((const char *)u2 - s2) + 1, "strcmp s2");
|
|
|
|
return *u1 - *u2;
|
2019-12-12 22:49:35 +03:00
|
|
|
}
|
|
|
|
|
|
|
|
/* Bounds-checking strncmp(): compares at most n bytes, stopping at a
   mismatch or at s1's nul, then bound-checks exactly the bytes read. */
int __bound_strncmp(const char *s1, const char *s2, size_t n)
{
    const unsigned char *u1 = (const unsigned char *) s1;
    const unsigned char *u2 = (const unsigned char *) s2;
    int retval = 0;

    dprintf(stderr, "%s, %s(): %p, %p, 0x%lx\n",
            __FILE__, __FUNCTION__, s1, s2, (unsigned long)n);
    INCR_COUNT(bound_strncmp_count);
    do {
        /* --n wraps for n == 0; the ssize_t cast makes that -1 and
           bounds the loop to n iterations */
        if ((ssize_t) --n == -1)
            break;
        else if (*u1 != *u2) {
            retval = *u1++ - *u2++;
            break;
        }
        /* u2 is advanced here; u1 is advanced by the *u1++ in the
           while condition below, so on every exit path both pointers
           end one past the last byte examined */
        ++u2;
    } while (*u1++);
    __bound_check(s1, (const char *)u1 - s1, "strncmp s1");
    __bound_check(s2, (const char *)u2 - s2, "strncmp s2");
    return retval;
}
|
|
|
|
|
|
|
|
char *__bound_strcat(char *dest, const char *src)
|
|
|
|
{
|
|
|
|
char *r = dest;
|
|
|
|
const char *s = src;
|
|
|
|
|
2020-01-15 10:53:19 +03:00
|
|
|
dprintf(stderr, "%s, %s(): %p, %p\n",
|
|
|
|
__FILE__, __FUNCTION__, dest, src);
|
2019-12-12 22:49:35 +03:00
|
|
|
INCR_COUNT(bound_strcat_count);
|
|
|
|
while (*dest++);
|
2020-01-15 10:53:19 +03:00
|
|
|
while (*src++);
|
|
|
|
__bound_check(r, (dest - r) + (src - s) - 1, "strcat dest");
|
|
|
|
__bound_check(s, src - s, "strcat src");
|
|
|
|
if (check_overlap(r, (dest - r) + (src - s) - 1, s, src - s, "strcat"))
|
|
|
|
return dest;
|
|
|
|
return strcat(r, s);
|
2019-12-12 22:49:35 +03:00
|
|
|
}
|
|
|
|
|
2020-01-15 10:53:19 +03:00
|
|
|
char *__bound_strchr(const char *s, int c)
|
2019-12-12 22:49:35 +03:00
|
|
|
{
|
2020-01-15 10:53:19 +03:00
|
|
|
const unsigned char *str = (const unsigned char *) s;
|
|
|
|
unsigned char ch = c;
|
2019-12-12 22:49:35 +03:00
|
|
|
|
2020-01-15 10:53:19 +03:00
|
|
|
dprintf(stderr, "%s, %s(): %p, %d\n",
|
|
|
|
__FILE__, __FUNCTION__, s, ch);
|
2019-12-12 22:49:35 +03:00
|
|
|
INCR_COUNT(bound_strchr_count);
|
2020-01-15 10:53:19 +03:00
|
|
|
while (*str) {
|
|
|
|
if (*str == ch)
|
2019-12-12 22:49:35 +03:00
|
|
|
break;
|
2020-01-15 10:53:19 +03:00
|
|
|
++str;
|
2019-12-12 22:49:35 +03:00
|
|
|
}
|
2020-01-15 10:53:19 +03:00
|
|
|
__bound_check(s, ((const char *)str - s) + 1, "strchr");
|
|
|
|
return *str == ch ? (char *) str : NULL;
|
2019-12-12 22:49:35 +03:00
|
|
|
}
|
|
|
|
|
|
|
|
/* Bounds-checking strdup(): checks the source string, duplicates it
   with the checker's own allocator, and (unless strdup tracking is
   disabled) registers the copy in the splay tree so later accesses
   to it are bound-checked.  Returns NULL if allocation fails. */
char *__bound_strdup(const char *s)
{
    const char *p = s;
    char *new;

    INCR_COUNT(bound_strdup_count);
    /* find the nul; p ends one past it, so p - s = strlen(s) + 1 */
    while (*p++);
    __bound_check(s, p - s, "strdup");
    new = BOUND_MALLOC ((p - s) + 1);
    dprintf(stderr, "%s, %s(): %p, 0x%lx\n",
            __FILE__, __FUNCTION__, new, (unsigned long)(p -s));
    if (new) {
        if (no_checking == 0 && no_strdup == 0) {
            /* tree insertion must be serialized against other threads */
            WAIT_SEM ();
            tree = splay_insert((size_t)new, p - s, tree);
            /* tag the region so free()-type mismatches can be reported */
            if (tree && tree->start == (size_t) new)
                tree->type = TCC_TYPE_STRDUP;
            POST_SEM ();
        }
        memcpy (new, s, p - s);
    }
    return new;
}
|
|
|
|
|
2019-12-10 10:07:25 +03:00
|
|
|
/*
|
|
|
|
An implementation of top-down splaying with sizes
|
|
|
|
D. Sleator <sleator@cs.cmu.edu>, January 1994.
|
|
|
|
|
|
|
|
This extends top-down-splay.c to maintain a size field in each node.
|
|
|
|
This is the number of nodes in the subtree rooted there. This makes
|
|
|
|
it possible to efficiently compute the rank of a key. (The rank is
|
|
|
|
the number of nodes to the left of the given key.)  It is also
|
|
|
|
possible to quickly find the node of a given rank. Both of these
|
|
|
|
operations are illustrated in the code below. The remainder of this
|
|
|
|
introduction is taken from top-down-splay.c.
|
|
|
|
|
|
|
|
"Splay trees", or "self-adjusting search trees" are a simple and
|
|
|
|
efficient data structure for storing an ordered set. The data
|
|
|
|
structure consists of a binary tree, with no additional fields. It
|
|
|
|
allows searching, insertion, deletion, deletemin, deletemax,
|
|
|
|
splitting, joining, and many other operations, all with amortized
|
|
|
|
logarithmic performance. Since the trees adapt to the sequence of
|
|
|
|
requests, their performance on real access patterns is typically even
|
|
|
|
better. Splay trees are described in a number of texts and papers
|
|
|
|
[1,2,3,4].
|
|
|
|
|
|
|
|
The code here is adapted from simple top-down splay, at the bottom of
|
|
|
|
page 669 of [2]. It can be obtained via anonymous ftp from
|
|
|
|
spade.pc.cs.cmu.edu in directory /usr/sleator/public.
|
|
|
|
|
|
|
|
The chief modification here is that the splay operation works even if the
|
|
|
|
item being splayed is not in the tree, and even if the tree root of the
|
|
|
|
tree is NULL. So the line:
|
|
|
|
|
|
|
|
t = splay(i, t);
|
|
|
|
|
|
|
|
causes it to search for item with key i in the tree rooted at t. If it's
|
|
|
|
there, it is splayed to the root. If it isn't there, then the node put
|
|
|
|
at the root is the last one before NULL that would have been reached in a
|
|
|
|
normal binary search for i. (It's a neighbor of i in the tree.) This
|
|
|
|
allows many other operations to be easily implemented, as shown below.
|
|
|
|
|
|
|
|
[1] "Data Structures and Their Algorithms", Lewis and Denenberg,
|
|
|
|
Harper Collins, 1991, pp 243-251.
|
|
|
|
[2] "Self-adjusting Binary Search Trees" Sleator and Tarjan,
|
|
|
|
JACM Volume 32, No 3, July 1985, pp 652-686.
|
|
|
|
[3] "Data Structure and Algorithm Analysis", Mark Weiss,
|
|
|
|
Benjamin Cummins, 1992, pp 119-130.
|
|
|
|
[4] "Data Structures, Algorithms, and Performance", Derick Wood,
|
|
|
|
Addison-Wesley, 1993, pp 367-375
|
|
|
|
*/
|
|
|
|
|
|
|
|
/* Code adapted for tcc */
|
|
|
|
|
|
|
|
/* Three-way comparison of an address against a region: 0 when addr is
   inside [tstart, tstart+tsize), otherwise the search direction. */
#define compare(start,tstart,tsize) (start < tstart ? -1 : \
                                     start >= tstart+tsize ? 1 : 0)

/* Top-down splay on the region containing 'addr' (keyed by region
   start/size).  Brings the matching node -- or, if absent, the last
   node visited -- to the root and returns the new root.  Does not
   allocate; purely restructures the tree. */
static Tree * splay (size_t addr, Tree *t)
/* Splay using the key start (which may or may not be in the tree.) */
/* The starting root is t, and the tree used is defined by rat */
{
    /* N is a stack-allocated header node; l/r collect the left and
       right fragments of the split tree during the descent */
    Tree N, *l, *r, *y;
    int comp;

    INCR_COUNT_SPLAY(bound_splay);
    if (t == NULL) return t;
    N.left = N.right = NULL;
    l = r = &N;

    for (;;) {
        comp = compare(addr, t->start, t->size);
        if (comp < 0) {
            y = t->left;
            if (y == NULL) break;
            if (compare(addr, y->start, y->size) < 0) {
                t->left = y->right; /* rotate right */
                y->right = t;
                t = y;
                if (t->left == NULL) break;
            }
            r->left = t; /* link right */
            r = t;
            t = t->left;
        } else if (comp > 0) {
            y = t->right;
            if (y == NULL) break;
            if (compare(addr, y->start, y->size) > 0) {
                t->right = y->left; /* rotate left */
                y->left = t;
                t = y;
                if (t->right == NULL) break;
            }
            l->right = t; /* link left */
            l = t;
            t = t->right;
        } else {
            break;
        }
    }
    /* reattach the collected fragments around the new root t */
    l->right = t->left; /* assemble */
    r->left = t->right;
    t->left = N.right;
    t->right = N.left;

    return t;
}
|
|
|
|
|
|
|
|
/* Three-way comparison of an address against a region's END address
   (tstart + tsize); used when searching by where regions finish. */
#define compare_end(start,tend) (start < tend ? -1 : \
                                 start > tend ? 1 : 0)

/* Same top-down splay as splay() above, but keyed on each region's
   end address (start + size) instead of its start/size interval. */
static Tree * splay_end (size_t addr, Tree *t)
/* Splay using the key start (which may or may not be in the tree.) */
/* The starting root is t, and the tree used is defined by rat */
{
    /* N is a stack-allocated header node; l/r collect the left and
       right fragments of the split tree during the descent */
    Tree N, *l, *r, *y;
    int comp;

    INCR_COUNT_SPLAY(bound_splay_end);
    if (t == NULL) return t;
    N.left = N.right = NULL;
    l = r = &N;

    for (;;) {
        comp = compare_end(addr, t->start + t->size);
        if (comp < 0) {
            y = t->left;
            if (y == NULL) break;
            if (compare_end(addr, y->start + y->size) < 0) {
                t->left = y->right; /* rotate right */
                y->right = t;
                t = y;
                if (t->left == NULL) break;
            }
            r->left = t; /* link right */
            r = t;
            t = t->left;
        } else if (comp > 0) {
            y = t->right;
            if (y == NULL) break;
            if (compare_end(addr, y->start + y->size) > 0) {
                t->right = y->left; /* rotate left */
                y->left = t;
                t = y;
                if (t->right == NULL) break;
            }
            l->right = t; /* link left */
            l = t;
            t = t->right;
        } else {
            break;
        }
    }
    /* reattach the collected fragments around the new root t */
    l->right = t->left; /* assemble */
    r->left = t->right;
    t->left = N.right;
    t->right = N.left;

    return t;
}
|
|
|
|
|
|
|
|
/* Insert the region [addr, addr+size) into tree t, unless a region
   containing addr already exists (in which case that node is splayed
   to the root and returned unchanged).  The new node becomes the
   root.  Nodes come from the free list when TREE_REUSE is enabled,
   otherwise from the checker's own allocator; allocation failure is
   reported via bound_alloc_error (which does not return normally). */
static Tree * splay_insert(size_t addr, size_t size, Tree * t)
/* Insert key start into the tree t, if it is not already there. */
/* Return a pointer to the resulting tree. */
{
    Tree * new;

    INCR_COUNT_SPLAY(bound_splay_insert);
    if (t != NULL) {
        /* bring the closest node to the root first */
        t = splay(addr,t);
        if (compare(addr, t->start, t->size)==0) {
            return t; /* it's already there */
        }
    }
#if TREE_REUSE
    if (tree_free_list) {
        /* recycle a previously deleted node (linked via ->left) */
        new = tree_free_list;
        tree_free_list = new->left;
    }
    else
#endif
    {
        new = (Tree *) BOUND_MALLOC (sizeof (Tree));
    }
    if (new == NULL) {
        bound_alloc_error("not enough memory for bound checking code");
    }
    else {
        /* split the old tree around the new root */
        if (t == NULL) {
            new->left = new->right = NULL;
        } else if (compare(addr, t->start, t->size) < 0) {
            new->left = t->left;
            new->right = t;
            t->left = NULL;
        } else {
            new->right = t->right;
            new->left = t;
            t->right = NULL;
        }
        new->start = addr;
        new->size = size;
        new->type = TCC_TYPE_NONE;
        new->is_invalid = 0;
    }
    return new;
}
|
|
|
|
|
|
|
|
/* Exact-match comparison on region start addresses: deletion only
   removes a node whose start equals addr, not any containing region. */
#define compare_destroy(start,tstart) (start < tstart ? -1 : \
                                       start > tstart ? 1 : 0)

/* Delete the region starting exactly at addr from tree t, if present,
   and return the root of the resulting tree.  The removed node goes
   onto the free list (TREE_REUSE) or back to the allocator. */
static Tree * splay_delete(size_t addr, Tree *t)
/* Deletes addr from the tree if it's there. */
/* Return a pointer to the resulting tree. */
{
    Tree * x;

    INCR_COUNT_SPLAY(bound_splay_delete);
    if (t==NULL) return NULL;
    /* splay the target (or its neighbor) to the root */
    t = splay(addr,t);
    if (compare_destroy(addr, t->start) == 0) { /* found it */
        if (t->left == NULL) {
            x = t->right;
        } else {
            /* splaying the left subtree on addr brings its maximum to
               its root, which therefore has no right child */
            x = splay(addr, t->left);
            x->right = t->right;
        }
#if TREE_REUSE
        t->left = tree_free_list;
        tree_free_list = t;
#else
        BOUND_FREE(t);
#endif
        return x;
    } else {
        return t; /* It wasn't there */
    }
}
|
|
|
|
|
|
|
|
/* Debug helper: dump the splay tree to stderr, rotated 90 degrees
   (right subtree printed first, above its parent), each node indented
   by its depth 'd'.  Prints start(size:type:is_invalid) per node. */
void splay_printtree(Tree * t, int d)
{
    int indent;

    if (t == NULL)
        return;
    splay_printtree(t->right, d+1);
    for (indent = 0; indent < d; indent++)
        fprintf(stderr," ");
    fprintf(stderr,"%p(0x%lx:%u:%u)\n",
            (void *) t->start, (unsigned long) t->size,
            (unsigned)t->type, (unsigned)t->is_invalid);
    splay_printtree(t->left, d+1);
}
|