author    Kaz Kylheku <kaz@kylheku.com>  2012-04-05 16:21:29 -0700
committer Kaz Kylheku <kaz@kylheku.com>  2012-04-05 16:21:29 -0700
commit    e46c958308fefc97c4dc106697283f33368af697 (patch)
tree      32d171f6c751a7410cb044d22e1dbdf1230db85e
parent    3a495eab4dcd66f8555828231e293b2d3cf307ef (diff)
parent    ff2df80b7b83f3fc003b124309a9101148f2d41f (diff)
Merge branch 'ephemeral-gc'
Conflicts: ChangeLog
-rw-r--r--  ChangeLog | 290
-rwxr-xr-x  configure |  13
-rw-r--r--  eval.c    |  32
-rw-r--r--  filter.c  |   6
-rw-r--r--  gc.c      | 251
-rw-r--r--  gc.h      |   9
-rw-r--r--  hash.c    |  30
-rw-r--r--  lib.c     |  95
-rw-r--r--  lib.h     |  57
-rw-r--r--  match.c   |  12
-rw-r--r--  stream.c  |  12
-rw-r--r--  unwind.c  |   6
12 files changed, 654 insertions, 159 deletions
diff --git a/ChangeLog b/ChangeLog
index e1ec27a8..3d3bb344 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -3,6 +3,296 @@
* txr.vim: @[...] syntax not marked as "contained" because it
can freely occur, and is useful in @(output).
+2012-04-05 Kaz Kylheku <kaz@kylheku.com>
+
+ * gc.c (FRESHQ_SIZE): Preprocessor symbol renamed to FRESHOBJ_VEC_SIZE.
+ (freshobj, make_obj): Object and function definitions follow rename.
+
+2012-04-05 Kaz Kylheku <kaz@kylheku.com>
+
+ * gc.c (mark_obj, sweep_one, gc_is_reachable): Check for gen > 0 rather
+ than gen == 0. This allows gen == -1 objects to be considered the
+ same as gen == 0, and traversed.
+ (gc_set, gc_mutated): When a gen 0 object is added to the checkobj
+ array, set its generation to -1. This prevents duplicates in
+ the checkobj array. Also, it fixes a bug: a vector marked as
+ mutated was not being traversed due to being in generation 1.
+
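A minimal sketch of the scheme described above, using the checkobj, checkobj_idx and t.gen names introduced elsewhere in this patch; gc_note_young is a hypothetical helper that omits the overflow handling and the in_malloc_range test of the real gc_set/gc_mutated.

    /* A generation 0 object entering the checkobj array is retagged as
       generation -1, so a second attempt to record the same object sees
       gen != 0 and skips it; no duplicates accumulate.  Marking and
       sweeping then treat gen <= 0 uniformly as "young" by testing
       gen > 0 rather than gen == 0. */
    static val gc_note_young(val obj)     /* hypothetical helper */
    {
      if (is_ptr(obj) && obj->t.gen == 0) {
        obj->t.gen = -1;                  /* means: already queued */
        checkobj[checkobj_idx++] = obj;
      }
      return obj;
    }
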
+2012-04-05 Kaz Kylheku <kaz@kylheku.com>
+
+ Code cleanup and tweaking.
+
+ * gc.c (BACKPTR_VEC_SIZE): Preprocessor symbol renamed to
+ CHECKOBJ_VEC_SIZE.
+ (FULL_GC_INTERVAL): Increased to 40 since the modified
+ algorithm now leaves less work for the full gc to do.
+ (backptr, backptr_idx): Static variables renamed to
+ checkobj and checkobj_idx.
+ (mark): Follows rename of backptr and backptr_idx.
+ (gc): Commented out handy printf added.
+ (gc_set): Use the in_malloc_range check to avoid adding to the check
+ array objects whose references are being stored in non-heap locations,
+ since non-heap locations are already GC roots.
+ (gc_mutated): Follows variable renaming.
+ (gc_push): Just do the push using gc_set.
+
+ * lib.c (malloc_low_bound, malloc_high_bound): New variables.
+ (chk_malloc, chk_calloc, chk_realloc): Updated malloc_low_bound
+ and malloc_high_bound.
+ (in_malloc_range): New function.
+
+ * lib.h (in_malloc_range): Declared.
+
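A minimal sketch of the malloc bounds tracking described above; note_alloc is a hypothetical helper standing in for the inlined updates in chk_malloc, chk_calloc and chk_realloc.

    /* The checked allocators record the lowest and highest addresses
       they have handed out.  A pointer inside [low, high) may be a
       field of GC-managed storage (heap blocks and vector element
       arrays come from these allocators); anything outside is a static
       or stack location, which is scanned as a GC root anyway and
       needs no entry in the check array.  The range is conservative:
       gaps between allocations only cause harmless extra entries. */
    static mem_t *malloc_low_bound, *malloc_high_bound;

    static void note_alloc(mem_t *ptr, size_t size)  /* hypothetical */
    {
      if (malloc_low_bound == 0 || ptr < malloc_low_bound)
        malloc_low_bound = ptr;
      if (ptr + size > malloc_high_bound)
        malloc_high_bound = ptr + size;
    }

    int in_malloc_range(mem_t *ptr)
    {
      return ptr >= malloc_low_bound && ptr < malloc_high_bound;
    }
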
+2012-04-05 Kaz Kylheku <kaz@kylheku.com>
+
+ The mut macro should only be used for vectors or vector-like objects
+ which hold direct references to other objects and must be used
+ each time a mutation takes place.
+
+ * eval.c (op_dohash): invocations of mut macro removed.
+ Comment rewritten.
+
+ * lib.c (sort_list): Use set macro for mutating assignment.
+ Do not invoke mut on sorted list; it won't work anyway, because
+ it doesn't mean what the code wants it to mean: that the list will be
+ fully traversed during gc marking.
+
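For illustration, the intended usage pattern looks roughly like the replace_vec change later in this patch; vec, source and n are assumed to be in scope.

    /* Raw stores into a vector's element array bypass the per-store
       set() barrier, so after the bulk update the whole object is
       reported once with mut(), which places it in the check array for
       the next partial gc. */
    cnum i;
    for (i = 0; i < n; i++)
      vec->v.vec[i] = source[i];   /* direct stores, no barrier */
    mut(vec);                      /* vector holds direct references */
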
+2012-04-05 Kaz Kylheku <kaz@kylheku.com>
+
+ Bunch of fixes.
+
+ * gc.c (gc_mutated): Return the value.
+
+ * gc.h (gc_mutated): Declaration updated.
+
+ * hash.c (remhash): Fix unsafe assignment to use set macro.
+
+ * lib.c (sort): Fix wrong use of mut macro on the list
+ before it is sorted rather than after.
+
+ * lib.h (mut): Trivial version of macro updated to return argument.
+
+ * unwind.c (uw_init): The toplevel environment's match_context
+ should be gc-protected. This is probably not used, though,
+ which is why it has not been a problem.
+
+2012-04-04 Kaz Kylheku <kaz@kylheku.com>
+
+ * hash.c (hash_grow, gethash_l, gethash, gethash_f): Replace
+ useless use of *vecref_l() with vecref().
+
+2012-04-04 Kaz Kylheku <kaz@kylheku.com>
+
+ * configure (gen_gc): Default to off.
+ Help section added for gen_gc variable.
+
+ * gc.c (gc): Some missing CONFIG_GEN_GC added.
+
+2012-04-04 Kaz Kylheku <kaz@kylheku.com>
+
+ Code cleanup.
+
+ * gc.c (backptr_oflow): Static variable removed.
+ (freshq_head, freshq_tail, partial_gc_count): Static variables removed.
+ (freshq): Array renamed to freshobj.
+ (full): Variable renamed to full_gc.
+ (freshobj_idx): New variable.
+
+ (make_obj): Add newly born objects to the freshobj array rather than
+ freshq. If the freshobj array is full on entry to this function,
+ trigger gc to empty it. make_obj no longer updates the free_tail;
+ the gc routine takes care of restoring this invariant.
+ (mark_obj): Follows rename of full_gc. Some code was not wrapped
+ in #if CONFIG_GEN_GC.
+ (mark, sweep_one): Follow rename of full_gc.
+ (sweep): On entry, restore free_tail invariant in the empty
+ free_list case. Code which processes freshq from tail to head
+ replaced by simple array walk of freshobj. Code wrapped properly
+ in #if CONFIG_GEN_GC.
+ (gc): Logic for triggering full gc simplified.
+ Check added for the situation in which a partial gc is invoked because
+ the free list has emptied, yet does not liberate enough memory.
+ This prevents partial gc from being called over and over
+ again by make_obj, squeezing out less and less memory each time until
+ it finally frees no objects and more() is called.
+ (gc_is_reachable): Follows rename of full_gc, and #if CONFIG_GEN_GC
+ added.
+ (gc_set, gc_mutated): Simplified. Check if the backptr array
+ is full and trigger gc if so to flush it, then just add to the array.
+
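A condensed sketch of the allocation path described above; take_from_free_list is a hypothetical stand-in for the free-list handling in the real make_obj.

    /* Newborn objects are recorded in the freshobj array.  When the
       array fills up, gc() runs first (normally a cheap partial
       collection that sweeps only freshobj), guaranteeing room for
       the new entry. */
    static val make_obj_sketch(void)            /* hypothetical name */
    {
      val ret;

      if (freshobj_idx >= FRESHOBJ_VEC_SIZE)
        gc();                                   /* empties freshobj */

      ret = take_from_free_list();              /* hypothetical helper */
      ret->t.gen = 0;                           /* generation 0: newborn */
      freshobj[freshobj_idx++] = ret;
      return ret;
    }
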
+2012-04-03 Kaz Kylheku <kaz@kylheku.com>
+
+ Performance tweaking and fixes.
+
+ * gc.c (BACKPTR_VEC_SIZE): Increase greatly, so that we don't
+ trigger gc due to overflow of the backptr array. This is not likely
+ to yield a lot of free objects except in a full GC.
+ (FULL_GC_INTERVAL): From 10 to 20.
+ (gc): Take note of whether gc was entered with the free_list
+ exhausted. Call more() only if the free_list was
+ empty and a full sweep was done.
+ Reset partial_gc_count only when a full gc is triggered.
+
+2012-04-03 Kaz Kylheku <kaz@kylheku.com>
+
+ Fix failing test case tests/006/freeform-1.txr.
+
+ * lib.c (lazy_str_force, lazy_str_force_upto): Use set macro
+ when assigning lim. This won't cause a problem unless lim is
+ in the bignum range, however.
+ (acons_new, aconsq_new): When overwriting the cdr value of
+ the existing entry, use set. This is the smoking gun;
+ these functions are used for manipulating bindings.
+ (sort): After sorting a list, we must mark it as having
+ been mutated. If a list contains only mature conses or only
+ fresh conses, there is no problem. But if it contains a mixture,
+ then sorting could reverse their relationship, causing mature
+ conses to backpoint to the fresh ones.
+ (obj_init): Use set when installing the t symbol into the user package.
+
+2012-04-03 Kaz Kylheku <kaz@kylheku.com>
+
+ Generational GC showing signs of working. One test case in
+ test suite fails.
+
+ * gc.c (FRESHQ_SIZE): New preprocessor symbol.
+ (backptr_oflow, freshq, freshq_head, freshq_tail): New static
+ variables.
+ (make_obj): Place newly allocated generation 0 object into
+ freshq. If freshq becomes full, transfer oldest item into
+ generation 1.
+ (mark_obj): If doing only a partial gc, then do not mark
+ objects which are not generation 0.
+ (mark_mem_region): Valgrind support: the t.type field can no longer
+ be marked defined on its own, because it is now a bitfield whose
+ address cannot be taken. Just mark the first SIZEOF_PTR bytes
+ of the object defined.
+ (mark): Under partial gc, mark the table of back pointers.
+ (sweep_one): New static function from the prior guts of sweep.
+ Reachable objects now get promoted to generation 1.
+ (sweep): Under partial gc, sweep just the freshq which identifies
+ the generation 0 objects, rather than the entire linked list of all the
+ heaps.
+ (gc): Trigger full gc also if the backptr list has overflowed
+ due to gc having been disabled.
+ Under generational gc, reset the static variables afterward:
+ clear the list of backpointers, and the freshq.
+ (gc_is_reachable): Under partial gc, report any mature object
+ as reachable.
+ (gc_set, gc_mutated): Handle backptr array overflow situation
+ when gc is disabled.
+ (gc_push): Bugfix: it is the newly pushed cons cell that has to be
+ marked as a root, not the value being pushed.
+
+ * hash.c (sethash): Use set macro for storing value.
+
+ * lib.h (set, mut, mpush): Fix wrong-way #if test for these macros.
+ The trivial versions were being defined under CONFIG_GEN_GC and vice
+ versa!
+
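With the #if sense corrected, the division of labor at call sites is roughly the following (examples modeled on sethash, pushhash and lazy_flatten_scan from this patch; hash, key, value, new_p, item and local_list are assumed to be in scope).

    /* Storing into a hash cell (heap memory): set, so gc_set can
       record the stored object if it is generation 0. */
    set(*gethash_l(hash, key, &new_p), value);

    /* Pushing onto a list held in a heap cell: mpush goes through
       gc_push, which conses and records via gc_set. */
    mpush(value, *gethash_l(hash, key, &new_p));

    /* Pushing onto a list held in a local variable: plain push is
       enough, because the C stack is scanned as a GC root anyway. */
    push(item, &local_list);
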
+2012-04-03 Kaz Kylheku <kaz@kylheku.com>
+
+ * eval.c (op_modplace): push replaced with mpush (mutating push).
+
+ * gc.c (gc_push): New function.
+
+ * gc.h (gc_push): Declared.
+
+ * hash.c (pushhash): Use mpush.
+
+ * lib.c (push): Reverted to unsafe operation. TODO comment replaced
+ with warning.
+ (lazy_flatten_scan): push occurrence commented as safe.
+ (lazy_stream_func): Unsafe push replaced with mpush.
+
+ * lib.h (mpush): New macro.
+
+2012-04-02 Kaz Kylheku <kaz@kylheku.com>
+
+ * configure: Support a gen-gc configuration variable which
+ causes CONFIG_GEN_GC to be defined as 1 in config.h.
+
+ * eval.c (op_defvar, dwim_loc, op_modplace, transform_op): Handle
+ mutating assignments via set macro.
+ (op_dohash): Inform gc about mutated variables. TODO here.
+
+ * filter.c (trie_add, trie_compress): Handle mutating assignments
+ via set macro.
+
+ * gc.c (BACKPTR_VEC_SIZE, FULL_GC_INTERVAL): New preprocessor symbols.
+ (backptr, backptr_idx, partial_gc_count, full): New static variables.
+ (make_obj): Initialize generation to zero.
+ (gc): Added logic for deciding between full and partial gc.
+ (gc_set, gc_mutated): New functions.
+
+ * gc.h (gc_set, gc_mutated): Declared.
+
+ * hash.c (hash_mark): Changed useless use of vecref_l to vecref.
+ (gethash_f): Use set when assigning through *found since it
+ is a possible mutation.
+
+ * lib.c (car_l, cdr_l, vecref_l): Got rid of loc macro uses. Using the
+ value properly is going to be the caller's responsibility.
+ (push): push may be a mutation, so use set.
+ (intern): Use set to mutate a hash entry.
+ (acons_new_l, aconsq_new_l): Use set when replacing *list.
+
+ * lib.h (PTR_BIT): New preprocessor symbol.
+ (obj_common): New macro for defining common object fields.
+ type_t is split into two bitfields, half a pointer wide,
+ allowing for generation to be represented.
+ (struct any, struct cons, struct string, struct sym, struct package,
+ struct func, struct vec, struct lazy_cons, struct cobj, struct env,
+ struct bignum, struct flonum): Use obj_common macro to define
+ common fields.
+ (loc): Macro removed.
+ (set, mut): Macros conditionally defined for real functionality.
+ (list_collect, list_collect_nconc, list_collect_append): Replace
+ mutating operations with set.
+
+ * match.c (dest_set, v_cat, v_output, v_filter): Replace
+ mutating operations with set.
+
+ * stream.c (string_in_get_line, string_in_get_char,
+ strlist_out_put_string, strlist_out_put_char): Replace mutating
+ operations with set.
+
+ * unwind.c (uw_register_subtype): Replace mutating operation with set.
+
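The obj_common change can be illustrated in isolation. The standalone snippet below uses its own minimal type_t and is only a demonstration that splitting the header word into two half-pointer-wide bitfields leaves room for a generation count while keeping the header one word wide on typical ABIs.

    #include <stdio.h>
    #include <limits.h>

    #define PTR_BIT (sizeof (void *) * CHAR_BIT)

    typedef enum { T_NIL, T_CONS, T_STR } type_t;  /* stand-in enum */

    struct header {
      type_t type : PTR_BIT / 2;  /* type tag in one half */
      int gen     : PTR_BIT / 2;  /* generation: 0 fresh, 1 mature, -1 queued */
    };

    int main(void)
    {
      struct header h = { T_CONS, 0 };
      h.gen = 1;                  /* promotion after surviving a sweep */
      printf("header bytes: %zu, type: %d, gen: %d\n",
             sizeof h, (int) h.type, h.gen);
      return 0;
    }
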
+2012-04-02 Kaz Kylheku <kaz@kylheku.com>
+
+ * lib.c (vec_set_length): Use set instead of assignment.
+ (vecref_l): Use loc to lift address of cell.
+ (replace_vec): Use macro mut to indicate the object is being
+ mutated.
+
+ * lib.h (mut): New macro.
+
+2012-04-01 Kaz Kylheku <kaz@kylheku.com>
+
+ Start of ground-work for ephemeral GC. We must add some abstraction
+ to places where we potentially assign a reference to a younger object
+ inside a field located in an older object (chronological
+ backreference) and also where we take the address of an object
+ field, making it possible that the user of the address will perform
+ such an assignment.
+
+ This patch does not take care of vectors.
+
+ No, this is not an April Fool's joke.
+
+ * eval.c (env_fbind, env_vbind, env_replace_vbind, lookup_var,
+ lookup_sym_lisp1): Use set macro instead of assignment.
+
+ * hash.c (hash_grow, set_hash_userdata, hash_next):
+ Use set macro instead of assignment.
+
+ * lib.c (rplaca, rplacd, string_extend, length_str, replace_str,
+ rehome_sym, lazy_stream_func, lazy_str, lazy_str_force,
+ lazy_str_force_upto, obj_init): Use set macro instead of assignment.
+
+ (car_l, cdr_l): Use loc instead of address-of operator.
+
+ * lib.h (set, loc): New macros.
+
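To make the hazard described in this entry concrete: a partial collection traces only generation 0 objects plus whatever the barrier has recorded, so an unrecorded old-to-young store would leave the young object invisible to it. The sketch below uses names from this patch; rebind_sketch is hypothetical and old_env is assumed to be an environment that survived an earlier collection (generation 1).

    static void rebind_sketch(val old_env, val sym, val obj)
    {
      /* acons_new conses fresh (generation 0) cells. */
      val fresh_bindings = acons_new(sym, obj, old_env->e.vbindings);

      /* A plain assignment here would create an old->young pointer
         that a partial gc never follows.  The set macro routes the
         store through gc_set, which records the young value in the
         check array so it still gets marked. */
      set(old_env->e.vbindings, fresh_bindings);
    }
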
2012-03-31 Kaz Kylheku <kaz@kylheku.com>
* hash.c (last_equal_key, last_equal_hash): New static variables.
diff --git a/configure b/configure
index 705ecaea..a35a0bb1 100755
--- a/configure
+++ b/configure
@@ -139,6 +139,7 @@ valgrind=${valgrind-}
lit_align=${lit_align-}
extra_debugging=${extra_debugging-}
debug_support=${debug_support-y}
+gen_gc=${gen_gc-}
mpi_version=1.8.6
have_quilt=
have_patch=
@@ -348,6 +349,13 @@ extra_debugging [$extra_debugging]
Use --extra_debugging to configure some additional debugging features,
which incur a run-time penalty.
+
+gen_gc [$gen_gc]
+
+ Use --gen-gc to enable the experimental generational garbage collector.
+ This is currently disabled by default: a mark-and-sweep garbage collection
+ strategy is used which performs a full sweep.
+
!
exit 1
fi
@@ -1123,9 +1131,8 @@ fi
# Some final blurbs into config.h
#
-if [ -n "$debug_support" ] ; then
- printf "#define CONFIG_DEBUG_SUPPORT 1\n" >> config.h
-fi
+[ -n "$debug_support" ] && printf "#define CONFIG_DEBUG_SUPPORT 1\n" >> config.h
+[ -n "$gen_gc" ] && printf "#define CONFIG_GEN_GC 1\n" >> config.h
#
# Regenerate config.make
diff --git a/eval.c b/eval.c
index b79b88f1..32be8f2f 100644
--- a/eval.c
+++ b/eval.c
@@ -82,21 +82,21 @@ val make_env(val vbindings, val fbindings, val up_env)
val env_fbind(val env, val sym, val fun)
{
type_check(env, ENV);
- env->e.fbindings = acons_new(sym, fun, env->e.fbindings);
+ set(env->e.fbindings, acons_new(sym, fun, env->e.fbindings));
return sym;
}
val env_vbind(val env, val sym, val obj)
{
type_check(env, ENV);
- env->e.vbindings = acons_new(sym, obj, env->e.vbindings);
+ set(env->e.vbindings, acons_new(sym, obj, env->e.vbindings));
return sym;
}
static void env_replace_vbind(val env, val bindings)
{
type_check(env, ENV);
- env->e.vbindings = bindings;
+ set(env->e.vbindings, bindings);
}
noreturn static val eval_error(val form, val fmt, ...)
@@ -120,7 +120,7 @@ val lookup_var(val env, val sym)
val bind = gethash(top_vb, sym);
if (cobjp(bind)) {
struct c_var *cv = (struct c_var *) cptr_get(bind);
- cv->bind->c.cdr = *cv->loc;
+ set(cv->bind->c.cdr, *cv->loc);
return cv->bind;
}
return bind;
@@ -183,7 +183,7 @@ static val lookup_sym_lisp1(val env, val sym)
val bind = gethash(top_vb, sym);
if (cobjp(bind)) {
struct c_var *cv = (struct c_var *) cptr_get(bind);
- cv->bind->c.cdr = *cv->loc;
+ set(cv->bind->c.cdr, *cv->loc);
return cv->bind;
}
return or2(bind, gethash(top_fb, sym));
@@ -701,7 +701,7 @@ static val op_defvar(val form, val env)
val existing = gethash(top_vb, sym);
if (existing)
- *cdr_l(existing) = value;
+ set(*cdr_l(existing), value);
else
sethash(top_vb, sym, cons(sym, value));
}
@@ -895,7 +895,7 @@ static val *dwim_loc(val form, val env, val op, val newform, val *retval)
loc = gethash_l(obj, first(args), &new_p);
if (new_p)
- *loc = second(args);
+ set(*loc, second(args));
return loc;
}
}
@@ -950,7 +950,7 @@ static val op_modplace(val form, val env)
}
loc = gethash_l(hash, key, &new_p);
if (new_p)
- *loc = eval(fourth(place), env, form);
+ set(*loc, eval(fourth(place), env, form));
} else if (sym == car_s) {
val cons = eval(second(place), env, form);
loc = car_l(cons);
@@ -975,15 +975,15 @@ static val op_modplace(val form, val env)
if (op == set_s) {
if (!third_arg_p)
eval_error(form, lit("~a: missing argument"), op, place, nao);
- return *loc = eval(newform, env, form);
+ return set(*loc, eval(newform, env, form));
} else if (op == inc_s) {
val inc = or2(eval(newform, env, form), one);
- return *loc = plus(*loc, inc);
+ return set(*loc, plus(*loc, inc));
} else if (op == dec_s) {
val inc = or2(eval(newform, env, form), one);
- return *loc = minus(*loc, inc);
+ return set(*loc, minus(*loc, inc));
} else if (op == push_s) {
- return push(newval, loc);
+ return mpush(newval, *loc);
} else if (op == pop_s) {
if (third_arg_p)
eval_error(form, lit("~a: superfluous argument"), op, place, nao);
@@ -1036,6 +1036,12 @@ static val op_dohash(val form, val env)
uw_block_begin (nil, result);
while ((cell = hash_next(&iter)) != nil) {
+ /* These assignments are gc-safe, because keyvar and valvar
+ are newer objects than existing entries in the hash,
+ unless the body mutates hash by inserting newer objects,
+ and also deleting them such that these variables end up
+ with the only reference. But in that case, those objects
+ will be noted in the GC's check list. */
*cdr_l(keyvar) = car(cell);
*cdr_l(valvar) = cdr(cell);
eval_progn(body, new_env, form);
@@ -1501,7 +1507,7 @@ static val transform_op(val forms, val syms, val rg)
val newsyms = syms;
val new_p;
val *place = acons_new_l(vararg, &new_p, &newsyms);
- val sym = if3(new_p, *place = gensym(prefix), *place);
+ val sym = if3(new_p, set(*place, gensym(prefix)), *place);
cons_bind (outsyms, outforms, transform_op(re, newsyms, rg));
return cons(outsyms, rlcp(cons(sym, outforms), outforms));
} else if (eq(vararg, rest_s)) {
diff --git a/filter.c b/filter.c
index 958d4911..0498ced2 100644
--- a/filter.c
+++ b/filter.c
@@ -61,7 +61,7 @@ static val trie_add(val trie, val key, val value)
val newnode_p;
val *loc = gethash_l(node, ch, &newnode_p);
if (newnode_p)
- *loc = make_hash(nil, nil, nil);
+ set(*loc, make_hash(nil, nil, nil));
node = *loc;
}
@@ -90,11 +90,11 @@ static void trie_compress(val *ptrie)
val value = get_hash_userdata(trie);
if (zerop(count)) {
- *ptrie = value;
+ set(*ptrie, value);
} else if (eq(count, one) && nullp(value)) {
val iter = hash_begin(trie);
val cell = hash_next(&iter);
- *ptrie = cons(car(cell), cdr(cell));
+ set(*ptrie, cons(car(cell), cdr(cell)));
trie_compress(cdr_l(*ptrie));
} else {
val cell, iter = hash_begin(trie);
diff --git a/gc.c b/gc.c
index 7df908c6..ebbb59ab 100644
--- a/gc.c
+++ b/gc.c
@@ -44,6 +44,9 @@
#define PROT_STACK_SIZE 1024
#define HEAP_SIZE 16384
+#define CHECKOBJ_VEC_SIZE (2 * HEAP_SIZE)
+#define FULL_GC_INTERVAL 40
+#define FRESHOBJ_VEC_SIZE (2 * HEAP_SIZE)
typedef struct heap {
struct heap *next;
@@ -72,6 +75,14 @@ static val heap_min_bound, heap_max_bound;
int gc_enabled = 1;
+#if CONFIG_GEN_GC
+static val checkobj[CHECKOBJ_VEC_SIZE];
+static int checkobj_idx;
+static val freshobj[FRESHOBJ_VEC_SIZE];
+static int freshobj_idx;
+static int full_gc;
+#endif
+
#if EXTRA_DEBUGGING
static val break_obj;
#endif
@@ -150,8 +161,15 @@ val make_obj(void)
{
int tries;
+#if CONFIG_GEN_GC
+ if (opt_gc_debug || freshobj_idx >= FRESHOBJ_VEC_SIZE) {
+ gc();
+ assert (freshobj_idx < FRESHOBJ_VEC_SIZE);
+ }
+#else
if (opt_gc_debug)
gc();
+#endif
for (tries = 0; tries < 3; tries++) {
if (free_list) {
@@ -165,13 +183,13 @@ val make_obj(void)
if (opt_vg_debug)
VALGRIND_MAKE_MEM_UNDEFINED(ret, sizeof *ret);
#endif
+#if CONFIG_GEN_GC
+ ret->t.gen = 0;
+ freshobj[freshobj_idx++] = ret;
+#endif
return ret;
}
- /* To save cycles, make_obj draws from the free list without
- updating this, but before calling gc, it has to be. */
- free_tail = &free_list;
-
switch (tries) {
case 0: gc(); break;
case 1: more(); break;
@@ -241,6 +259,11 @@ tail_call:
t = obj->t.type;
+#if CONFIG_GEN_GC
+ if (!full_gc && obj->t.gen > 0)
+ return;
+#endif
+
if ((t & REACHABLE) != 0)
return;
@@ -352,7 +375,7 @@ static void mark_mem_region(val *low, val *high)
if (in_heap(maybe_obj)) {
#ifdef HAVE_VALGRIND
if (opt_vg_debug)
- VALGRIND_MAKE_MEM_DEFINED(&maybe_obj->t.type, sizeof maybe_obj->t.type);
+ VALGRIND_MAKE_MEM_DEFINED(maybe_obj, SIZEOF_PTR);
#endif
type_t t = maybe_obj->t.type;
if ((t & FREE) == 0) {
@@ -378,6 +401,18 @@ static void mark(mach_context_t *pmc, val *gc_stack_top)
for (rootloc = prot_stack; rootloc != top; rootloc++)
mark_obj(**rootloc);
+#if CONFIG_GEN_GC
+ /*
+ * Mark the additional objects indicated for marking.
+ */
+ if (!full_gc)
+ {
+ int i;
+ for (i = 0; i < checkobj_idx; i++)
+ mark_obj(checkobj[i]);
+ }
+#endif
+
/*
* Then the machine context
*/
@@ -389,97 +424,155 @@ static void mark(mach_context_t *pmc, val *gc_stack_top)
mark_mem_region(gc_stack_top, gc_stack_bottom);
}
-static int_ptr_t sweep(void)
+static int sweep_one(obj_t *block)
{
- heap_t *heap;
- int gc_dbg = opt_gc_debug;
- int_ptr_t free_count = 0;
#ifdef HAVE_VALGRIND
- int vg_dbg = opt_vg_debug;
+ const int vg_dbg = opt_vg_debug;
#else
- int vg_dbg = 0;
+ const int vg_dbg = 0;
#endif
- for (heap = heap_list; heap != 0; heap = heap->next) {
- obj_t *block, *end;
+#if CONFIG_GEN_GC
+ if (!full_gc && block->t.gen > 0)
+ abort();
+#endif
+
+ if ((block->t.type & (REACHABLE | FREE)) == (REACHABLE | FREE))
+ abort();
+ if (block->t.type & REACHABLE) {
+ block->t.type = (type_t) (block->t.type & ~REACHABLE);
+#if CONFIG_GEN_GC
+ block->t.gen = 1;
+#endif
+ return 0;
+ }
+
+ if (block->t.type & FREE) {
#ifdef HAVE_VALGRIND
if (vg_dbg)
- VALGRIND_MAKE_MEM_DEFINED(&heap->block, sizeof heap->block);
+ VALGRIND_MAKE_MEM_NOACCESS(block, sizeof *block);
#endif
+ return 1;
+ }
- for (block = heap->block, end = heap->block + HEAP_SIZE;
- block < end;
- block++)
- {
- if ((block->t.type & (REACHABLE | FREE)) == (REACHABLE | FREE))
- abort();
+ finalize(block);
+ block->t.type = (type_t) (block->t.type | FREE);
+
+ /* If debugging is turned on, we want to catch instances
+ where a reachable object is wrongly freed. This is difficult
+ to do if the object is recycled soon after.
+ So when debugging is on, the free list is FIFO
+ rather than LIFO, which increases our chances that the
+ code which is still using the object will trip on
+ the freed object before it is recycled. */
+ if (vg_dbg || opt_gc_debug) {
+#ifdef HAVE_VALGRIND
+ if (vg_dbg && free_tail != &free_list)
+ VALGRIND_MAKE_MEM_DEFINED(free_tail, sizeof *free_tail);
+#endif
+ *free_tail = block;
+ block->t.next = nil;
+#ifdef HAVE_VALGRIND
+ if (vg_dbg) {
+ if (free_tail != &free_list)
+ VALGRIND_MAKE_MEM_NOACCESS(free_tail, sizeof *free_tail);
+ VALGRIND_MAKE_MEM_NOACCESS(block, sizeof *block);
+ }
+#endif
+ free_tail = &block->t.next;
+ } else {
+ block->t.next = free_list;
+ free_list = block;
+ }
- if (block->t.type & REACHABLE) {
- block->t.type = (type_t) (block->t.type & ~REACHABLE);
- continue;
- }
+ return 1;
+}
- if (block->t.type & FREE) {
+static int_ptr_t sweep(void)
+{
+ int_ptr_t free_count = 0;
+ heap_t *heap;
#ifdef HAVE_VALGRIND
- if (vg_dbg)
- VALGRIND_MAKE_MEM_NOACCESS(block, sizeof *block);
+ const int vg_dbg = opt_vg_debug;
#endif
- free_count++;
- continue;
- }
- if (0 && gc_dbg) {
- format(std_error, lit("~a: finalizing: "), progname, nao);
- obj_print(block, std_error);
- put_char(chr('\n'), std_error);
- }
- finalize(block);
- block->t.type = (type_t) (block->t.type | FREE);
- free_count++;
- /* If debugging is turned on, we want to catch instances
- where a reachable object is wrongly freed. This is difficult
- to do if the object is recycled soon after.
- So when debugging is on, the free list is FIFO
- rather than LIFO, which increases our chances that the
- code which is still using the object will trip on
- the freed object before it is recycled. */
- if (gc_dbg || vg_dbg) {
-#ifdef HAVE_VALGRIND
- if (vg_dbg && free_tail != &free_list)
- VALGRIND_MAKE_MEM_DEFINED(free_tail, sizeof *free_tail);
+ if (free_list == 0)
+ free_tail = &free_list;
+
+#if CONFIG_GEN_GC
+ if (!full_gc) {
+ int i;
+ /* No need to mark block defined via Valgrind API; everything
+ in the freshobj is an allocated node! */
+ for (i = 0; i < freshobj_idx; i++)
+ free_count += sweep_one(freshobj[i]);
+
+ return free_count;
+ }
#endif
- *free_tail = block;
- block->t.next = nil;
+
+ for (heap = heap_list; heap != 0; heap = heap->next) {
+ obj_t *block, *end;
+
#ifdef HAVE_VALGRIND
- if (vg_dbg) {
- if (free_tail != &free_list)
- VALGRIND_MAKE_MEM_NOACCESS(free_tail, sizeof *free_tail);
- VALGRIND_MAKE_MEM_NOACCESS(block, sizeof *block);
- }
+ if (vg_dbg)
+ VALGRIND_MAKE_MEM_DEFINED(&heap->block, sizeof heap->block);
#endif
- free_tail = &block->t.next;
- } else {
- block->t.next = free_list;
- free_list = block;
- }
+
+ for (block = heap->block, end = heap->block + HEAP_SIZE;
+ block < end;
+ block++)
+ {
+ free_count += sweep_one(block);
}
}
+
return free_count;
}
void gc(void)
{
val gc_stack_top = nil;
+#if CONFIG_GEN_GC
+ int exhausted = (free_list == 0);
+#endif
if (gc_enabled) {
+ int swept;
+#if CONFIG_GEN_GC
+ static int gc_counter;
+ if (++gc_counter >= FULL_GC_INTERVAL) {
+ full_gc = 1;
+ gc_counter = 0;
+ }
+#endif
+
mach_context_t mc;
save_context(mc);
gc_enabled = 0;
mark(&mc, &gc_stack_top);
hash_process_weak();
- if (sweep() < 3 * HEAP_SIZE / 4)
+ swept = sweep();
+#if CONFIG_GEN_GC
+#if 0
+ printf("sweep: freed %d full_gc == %d exhausted == %d\n",
+ (int) swept, full_gc, exhausted);
+#endif
+ if (full_gc && swept < 3 * HEAP_SIZE / 4)
+ more();
+ else if (!full_gc && swept < HEAP_SIZE / 4 && exhausted)
+ more();
+#else
+ if (swept < 3 * HEAP_SIZE / 4)
more();
+#endif
+
+#if CONFIG_GEN_GC
+ checkobj_idx = 0;
+ freshobj_idx = 0;
+ full_gc = 0;
+#endif
gc_enabled = 1;
}
}
@@ -508,11 +601,45 @@ int gc_is_reachable(val obj)
if (!is_ptr(obj))
return 1;
+#if CONFIG_GEN_GC
+ if (!full_gc && obj->t.gen > 0)
+ return 1;
+#endif
+
t = obj->t.type;
return (t & REACHABLE) != 0;
}
+#if CONFIG_GEN_GC
+
+val gc_set(val *ptr, val obj)
+{
+ if (in_malloc_range((mem_t *) ptr) && is_ptr(obj) && obj->t.gen == 0) {
+ if (checkobj_idx >= CHECKOBJ_VEC_SIZE)
+ gc();
+ obj->t.gen = -1;
+ checkobj[checkobj_idx++] = obj;
+ }
+ *ptr = obj;
+ return obj;
+}
+
+val gc_mutated(val obj)
+{
+ if (checkobj_idx >= CHECKOBJ_VEC_SIZE)
+ gc();
+ obj->t.gen = -1;
+ return checkobj[checkobj_idx++] = obj;
+}
+
+val gc_push(val obj, val *plist)
+{
+ return gc_set(plist, cons(obj, *plist));
+}
+
+#endif
+
/*
* Useful functions for gdb'ing.
*/
diff --git a/gc.h b/gc.h
index 0ca2b20a..b1b29e0d 100644
--- a/gc.h
+++ b/gc.h
@@ -23,6 +23,7 @@
* IMPLIED WARRANTIES, INCLUDING, WITHOUT LIMITATION, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
*/
+
void gc_init(val *stack_bottom);
val prot1(val *loc);
void rel1(val *loc);
@@ -33,8 +34,16 @@ void gc(void);
int gc_state(int);
void gc_mark(val);
int gc_is_reachable(val);
+
+#if CONFIG_GEN_GC
+val gc_set(val *, val);
+val gc_push(val, val *);
+val gc_mutated(val);
+#endif
+
void unmark(void);
void gc_hint_func(val *);
+
#define gc_hint(var) gc_hint_func(&var)
#define REACHABLE 0x100
#define FREE 0x200
diff --git a/hash.c b/hash.c
index 78052119..d0bd4ac5 100644
--- a/hash.c
+++ b/hash.c
@@ -282,10 +282,10 @@ static void hash_mark(val hash)
/* Keys are weak: mark the values only. */
for (i = 0; i < h->modulus; i++) {
val ind = num(i);
- val *pchain = vecref_l(h->table, ind);
+ val chain = vecref(h->table, ind);
val iter;
- for (iter = *pchain; iter != nil; iter = cdr(iter)) {
+ for (iter = chain; iter != nil; iter = cdr(iter)) {
val entry = car(iter);
gc_mark(cdr(entry));
}
@@ -298,10 +298,10 @@ static void hash_mark(val hash)
for (i = 0; i < h->modulus; i++) {
val ind = num(i);
- val *pchain = vecref_l(h->table, ind);
+ val chain = vecref(h->table, ind);
val iter;
- for (iter = *pchain; iter != nil; iter = cdr(iter)) {
+ for (iter = chain; iter != nil; iter = cdr(iter)) {
val entry = car(iter);
gc_mark(car(entry));
}
@@ -332,7 +332,7 @@ static void hash_grow(struct hash *h)
bug_unless (new_modulus > h->modulus);
for (i = 0; i < h->modulus; i++) {
- val conses = *vecref_l(h->table, num(i));
+ val conses = vecref(h->table, num(i));
while (conses) {
val entry = car(conses);
@@ -346,7 +346,7 @@ static void hash_grow(struct hash *h)
}
h->modulus = new_modulus;
- h->table = new_table;
+ set(h->table, new_table);
}
val make_hash(val weak_keys, val weak_vals, val equal_based)
@@ -384,7 +384,7 @@ val *gethash_l(val hash, val key, val *new_p)
val gethash(val hash, val key)
{
struct hash *h = (struct hash *) cobj_handle(hash, hash_s);
- val chain = *vecref_l(h->table, num(h->hash_fun(key) % h->modulus));
+ val chain = vecref(h->table, num(h->hash_fun(key) % h->modulus));
val found = h->assoc_fun(key, chain);
return cdr(found);
}
@@ -392,15 +392,15 @@ val gethash(val hash, val key)
val gethash_f(val hash, val key, val *found)
{
struct hash *h = (struct hash *) cobj_handle(hash, hash_s);
- val chain = *vecref_l(h->table, num(h->hash_fun(key) % h->modulus));
- *found = h->assoc_fun(key, chain);
+ val chain = vecref(h->table, num(h->hash_fun(key) % h->modulus));
+ set(*found, h->assoc_fun(key, chain));
return cdr(*found);
}
val gethash_n(val hash, val key, val notfound_val)
{
struct hash *h = (struct hash *) cobj_handle(hash, hash_s);
- val chain = *vecref_l(h->table, num(h->hash_fun(key) % h->modulus));
+ val chain = vecref(h->table, num(h->hash_fun(key) % h->modulus));
val existing = h->assoc_fun(key, chain);
return if3(existing, cdr(existing), notfound_val);
}
@@ -408,14 +408,14 @@ val gethash_n(val hash, val key, val notfound_val)
val sethash(val hash, val key, val value)
{
val new_p;
- *gethash_l(hash, key, &new_p) = value;
+ set(*gethash_l(hash, key, &new_p), value);
return new_p;
}
val pushhash(val hash, val key, val value)
{
val new_p;
- push(value, gethash_l(hash, key, &new_p));
+ mpush(value, *gethash_l(hash, key, &new_p));
return new_p;
}
@@ -427,7 +427,7 @@ val remhash(val hash, val key)
if (existing) {
val loc = memq(existing, *pchain);
- *pchain = nappend2(ldiff(*pchain, loc), cdr(loc));
+ set(*pchain, nappend2(ldiff(*pchain, loc), cdr(loc)));
h->count--;
bug_unless (h->count >= 0);
}
@@ -451,7 +451,7 @@ val set_hash_userdata(val hash, val data)
{
struct hash *h = (struct hash *) cobj_handle(hash, hash_s);
val olddata = h->userdata;
- h->userdata = data;
+ set(h->userdata, data);
return olddata;
}
@@ -502,7 +502,7 @@ val hash_next(val *iter)
*iter = nil;
return nil;
}
- hi->cons = vecref(h->table, num(hi->chain));
+ set(hi->cons, vecref(h->table, num(hi->chain)));
}
return car(hi->cons);
}
diff --git a/lib.c b/lib.c
index 186a4a8c..52197df1 100644
--- a/lib.c
+++ b/lib.c
@@ -224,9 +224,9 @@ val rplaca(val cons, val new_car)
{
switch (type(cons)) {
case CONS:
- return cons->c.car = new_car;
+ return set(cons->c.car, new_car);
case LCONS:
- return cons->lc.car = new_car;
+ return set(cons->lc.car, new_car);
default:
type_mismatch(lit("~s is not a cons"), cons, nao);
}
@@ -237,9 +237,9 @@ val rplacd(val cons, val new_cdr)
{
switch (type(cons)) {
case CONS:
- return cons->c.cdr = new_cdr;
+ return set(cons->c.cdr, new_cdr);
case LCONS:
- return cons->lc.cdr = new_cdr;
+ return set(cons->lc.cdr, new_cdr);
default:
type_mismatch(lit("~s is not a cons"), cons, nao);
}
@@ -361,6 +361,7 @@ val pop(val *plist)
val push(val value, val *plist)
{
+ /* Unsafe for mutating object fields: use mpush macro. */
return *plist = cons(value, *plist);
}
@@ -757,7 +758,7 @@ static val lazy_flatten_scan(val list, val *escape)
} else if (atom(a)) {
return list;
} else do {
- push(cdr(list), escape);
+ push(cdr(list), escape); /* safe mutation: *escape is a local var */
list = a;
a = car(list);
} while (consp(a));
@@ -929,11 +930,17 @@ val cobj_equal_op(val left, val right)
return eq(left, right);
}
+static mem_t *malloc_low_bound, *malloc_high_bound;
+
mem_t *chk_malloc(size_t size)
{
mem_t *ptr = (mem_t *) malloc(size);
if (size && ptr == 0)
ptr = (mem_t *) oom_realloc(0, size);
+ if (ptr < malloc_low_bound)
+ malloc_low_bound = ptr;
+ else if (ptr + size > malloc_high_bound)
+ malloc_high_bound = ptr + size;
return ptr;
}
@@ -944,6 +951,10 @@ mem_t *chk_calloc(size_t n, size_t size)
ptr = (mem_t *) oom_realloc(0, size);
memset(ptr, 0, n * size);
}
+ if (ptr < malloc_low_bound)
+ malloc_low_bound = ptr;
+ else if (ptr + size > malloc_high_bound)
+ malloc_high_bound = ptr + size;
return ptr;
}
@@ -952,9 +963,18 @@ mem_t *chk_realloc(mem_t *old, size_t size)
mem_t *newptr = (mem_t *) realloc(old, size);
if (size != 0 && newptr == 0)
newptr = oom_realloc(old, size);
+ if (newptr < malloc_low_bound)
+ malloc_low_bound = newptr;
+ else if (newptr + size > malloc_high_bound)
+ malloc_high_bound = newptr + size;
return newptr;
}
+int in_malloc_range(mem_t *ptr)
+{
+ return ptr >= malloc_low_bound && ptr < malloc_high_bound;
+}
+
wchar_t *chk_strdup(const wchar_t *str)
{
size_t nchar = wcslen(str) + 1;
@@ -1431,8 +1451,8 @@ val string_extend(val str, val tail)
str->st.str = (wchar_t *) chk_realloc((mem_t *) str->st.str,
alloc * sizeof *str->st.str);
- str->st.alloc = num(alloc);
- str->st.len = plus(str->st.len, needed);
+ set(str->st.alloc, num(alloc));
+ set(str->st.len, plus(str->st.len, needed));
if (stringp(tail)) {
wmemcpy(str->st.str + len, c_str(tail), c_num(needed) + 1);
@@ -1475,8 +1495,8 @@ val length_str(val str)
}
if (!str->st.len) {
- str->st.len = num(wcslen(str->st.str));
- str->st.alloc = plus(str->st.len, one);
+ set(str->st.len, num(wcslen(str->st.str)));
+ set(str->st.alloc, plus(str->st.len, one));
}
return str->st.len;
}
@@ -1741,7 +1761,7 @@ val replace_str(val str_in, val items, val from, val to)
wmemmove(str_in->st.str + t - c_num(len_diff),
str_in->st.str + t, (l - t) + 1);
- str_in->st.len = minus(len, len_diff);
+ set(str_in->st.len, minus(len, len_diff));
to = plus(from, len_it);
} else if (lt(len_rep, len_it)) {
val len_diff = minus(len_it, len_rep);
@@ -2249,7 +2269,7 @@ val intern(val str, val package)
} else {
val newsym = make_sym(str);
newsym->s.package = package;
- return *place = newsym;
+ return set(*place, newsym);
}
}
@@ -2262,7 +2282,7 @@ static val rehome_sym(val sym, val package)
if (sym->s.package)
remhash(sym->s.package->pk.symhash, symbol_name(sym));
- sym->s.package = package;
+ set(sym->s.package, package);
sethash(package->pk.symhash, symbol_name(sym), sym);
return sym;
}
@@ -3063,7 +3083,7 @@ val vec_set_length(val vec, val length)
val *newvec = (val *) chk_realloc((mem_t *) (vec->v.vec - 2),
(new_alloc + 2) * sizeof *newvec);
vec->v.vec = newvec + 2;
- vec->v.vec[vec_alloc] = num(new_alloc);
+ set(vec->v.vec[vec_alloc], num(new_alloc));
#ifdef HAVE_VALGRIND
vec->v.vec_true_start = newvec;
#endif
@@ -3075,7 +3095,7 @@ val vec_set_length(val vec, val length)
vec->v.vec[i] = nil;
}
- vec->v.vec[vec_length] = length;
+ set(vec->v.vec[vec_length], length);
}
return vec;
@@ -3261,6 +3281,7 @@ val replace_vec(val vec_in, val items, val from, val to)
if (vectorp(items)) {
memcpy(vec_in->v.vec + c_num(from), items->v.vec,
sizeof *vec_in->v.vec * c_num(len_it));
+ mut(vec_in);
} else if (stringp(items)) {
cnum f = c_num(from);
cnum t = c_num(to);
@@ -3276,6 +3297,7 @@ val replace_vec(val vec_in, val items, val from, val to)
for (iter = items; iter && f != t; iter = cdr(iter), f++)
vec_in->v.vec[f] = car(iter);
+ mut(vec_in);
}
return vec_in;
}
@@ -3316,15 +3338,15 @@ static val lazy_stream_func(val env, val lcons)
val next = cdr(env) ? pop(cdr_l(env)) : get_line(stream);
val ahead = get_line(stream);
- lcons->lc.car = next;
- lcons->lc.cdr = if2(ahead, make_lazy_cons(lcons->lc.func));
+ set(lcons->lc.car, next);
+ set(lcons->lc.cdr, if2(ahead, make_lazy_cons(lcons->lc.func)));
lcons->lc.func = nil;
if (!next || !ahead)
close_stream(stream, t);
if (ahead)
- push(ahead, cdr_l(env));
+ mpush(ahead, *cdr_l(env));
return next;
}
@@ -3357,12 +3379,12 @@ val lazy_str(val lst, val term, val limit)
obj->ls.prefix = null_string;
obj->ls.list = nil;
} else {
- obj->ls.prefix = cat_str(list(first(lst), term, nao), nil);
- obj->ls.list = rest(lst);
+ set(obj->ls.prefix, cat_str(list(first(lst), term, nao), nil));
+ set(obj->ls.list, rest(lst));
limit = if2(limit, minus(limit, one));
}
- obj->ls.opts = cons(term, limit);
+ set(obj->ls.opts, cons(term, limit));
return obj;
}
@@ -3376,13 +3398,13 @@ val lazy_str_force(val lstr)
while ((!lim || gt(lim, zero)) && lstr->ls.list) {
val next = pop(&lstr->ls.list);
val term = car(lstr->ls.opts);
- lstr->ls.prefix = cat_str(list(lstr->ls.prefix, next, term, nao), nil);
+ set(lstr->ls.prefix, cat_str(list(lstr->ls.prefix, next, term, nao), nil));
if (lim)
lim = minus(lim, one);
}
if (lim)
- *cdr_l(lstr->ls.opts) = lim;
+ set(*cdr_l(lstr->ls.opts), lim);
return lstr->ls.prefix;
}
@@ -3399,13 +3421,13 @@ val lazy_str_force_upto(val lstr, val index)
{
val next = pop(&lstr->ls.list);
val term = car(lstr->ls.opts);
- lstr->ls.prefix = cat_str(list(lstr->ls.prefix, next, term, nao), nil);
+ set(lstr->ls.prefix, cat_str(list(lstr->ls.prefix, next, term, nao), nil));
if (lim)
lim = minus(lim, one);
}
if (lim)
- *cdr_l(lstr->ls.opts) = lim;
+ set(*cdr_l(lstr->ls.opts), lim);
return lt(index, length_str(lstr->ls.prefix));
}
@@ -3599,7 +3621,7 @@ val acons_new(val key, val value, val list)
val existing = assoc(key, list);
if (existing) {
- *cdr_l(existing) = value;
+ set(*cdr_l(existing), value);
return list;
} else {
return cons(cons(key, value), list);
@@ -3616,7 +3638,7 @@ val *acons_new_l(val key, val *new_p, val *list)
return cdr_l(existing);
} else {
val nc = cons(key, nil);
- *list = cons(nc, *list);
+ set(*list, cons(nc, *list));
if (new_p)
*new_p = t;
return cdr_l(nc);
@@ -3628,7 +3650,7 @@ val aconsq_new(val key, val value, val list)
val existing = assq(key, list);
if (existing) {
- *cdr_l(existing) = value;
+ set(*cdr_l(existing), value);
return list;
} else {
return cons(cons(key, value), list);
@@ -3645,7 +3667,7 @@ val *aconsq_new_l(val key, val *new_p, val *list)
return cdr_l(existing);
} else {
val nc = cons(key, nil);
- *list = cons(nc, *list);
+ set(*list, cons(nc, *list));
if (new_p)
*new_p = t;
return cdr_l(nc);
@@ -3778,7 +3800,11 @@ static val sort_list(val list, val lessfun, val keyfun)
return list;
} else {
val cons2 = cdr(list);
- *cdr_l(cons2) = list;
+ /* This assignment is a dangerous mutation since the list
+ may contain mixtures of old and new objects, and
+ so we could be reversing a newer->older pointer
+ relationship. */
+ set(*cdr_l(cons2), list);
*cdr_l(list) = nil;
return cons2;
}
@@ -3845,8 +3871,13 @@ val sort(val seq, val lessfun, val keyfun)
if (!keyfun)
keyfun = identity_f;
- if (consp(seq))
+ if (consp(seq)) {
+ /* The list could have a mixture of generation 0 and 1
+ objects. Sorting the list could reverse some of the
+ pointers between the generations resulting in a backpointer.
+ Thus we better inform the collector about this object. */
return sort_list(seq, lessfun, keyfun);
+ }
sort_vec(seq, lessfun, keyfun);
return seq;
@@ -4047,8 +4078,8 @@ static void obj_init(void)
*gethash_l(user_package->pk.symhash, nil_string, 0) = nil;
/* t can't be interned, because gethash_l needs t in order to do its job. */
- t = *gethash_l(user_package->pk.symhash, lit("t"), 0) = make_sym(lit("t"));
- t->s.package = user_package;
+ t = set(*gethash_l(user_package->pk.symhash, lit("t"), 0), make_sym(lit("t")));
+ set(t->s.package, user_package);
null = intern(lit("null"), user_package);
cons_s = intern(lit("cons"), user_package);
diff --git a/lib.h b/lib.h
index 8f4fae65..1f8360c1 100644
--- a/lib.h
+++ b/lib.h
@@ -37,6 +37,8 @@ typedef int_ptr_t cnum;
#define NUM_MAX (INT_PTR_MAX/4)
#define NUM_MIN (INT_PTR_MIN/4)
+#define PTR_BIT (SIZEOF_PTR * CHAR_BIT)
+
typedef enum type {
NIL, NUM = TAG_NUM, CHR = TAG_CHR, LIT = TAG_LIT, CONS,
STR, SYM, PKG, FUN, VEC, LCONS, LSTR, COBJ, ENV,
@@ -59,39 +61,48 @@ typedef obj_t *val;
typedef unsigned char mem_t;
+#if CONFIG_GEN_GC
+#define obj_common \
+ type_t type : PTR_BIT/2; \
+ int gen : PTR_BIT/2
+#else
+#define obj_common \
+ type_t type
+#endif
+
struct any {
- type_t type;
+ obj_common;
void *dummy[2];
val next; /* GC free list */
};
struct cons {
- type_t type;
+ obj_common;
val car, cdr;
};
struct string {
- type_t type;
+ obj_common;
wchar_t *str;
val len;
val alloc;
};
struct sym {
- type_t type;
+ obj_common;
val name;
val package;
val value;
};
struct package {
- type_t type;
+ obj_common;
val name;
val symhash;
};
struct func {
- type_t type;
+ obj_common;
unsigned fixparam : 7; /* total non-variadic parameters */
unsigned optargs : 7; /* fixparam - optargs = required args */
unsigned variadic : 1;
@@ -126,7 +137,7 @@ struct func {
enum vecindex { vec_alloc = -2, vec_length = -1 };
struct vec {
- type_t type;
+ obj_common;
/* vec points two elements down */
/* vec[-2] is allocated size */
/* vec[-1] is fill pointer */
@@ -145,7 +156,7 @@ struct vec {
*/
struct lazy_cons {
- type_t type;
+ obj_common;
val car, cdr;
val func; /* when nil, car and cdr are valid */
};
@@ -155,14 +166,14 @@ struct lazy_cons {
* of a list of strings.
*/
struct lazy_string {
- type_t type;
+ obj_common;
val prefix; /* actual string part */
val list; /* remaining list */
val opts; /* ( separator . limit ) */
};
struct cobj {
- type_t type;
+ obj_common;
mem_t *handle;
struct cobj_ops *ops;
val cls;
@@ -185,19 +196,19 @@ void cobj_mark_op(val);
cnum cobj_hash_op(val);
struct env {
- type_t type;
+ obj_common;
val vbindings;
val fbindings;
val up_env;
};
struct bignum {
- type_t type;
+ obj_common;
mp_int mp;
};
struct flonum {
- type_t type;
+ obj_common;
double n;
};
@@ -217,6 +228,17 @@ union obj {
struct flonum fl;
};
+#if CONFIG_GEN_GC
+val gc_set(val *, val);
+#define set(place, val) (gc_set(&(place), val))
+#define mut(obj) (gc_mutated(obj));
+#define mpush(val, place) (gc_push(val, &(place)))
+#else
+#define set(place, val) ((place) = (val))
+#define mut(obj) (obj)
+#define mpush(val, place) (push(val, &(place)))
+#endif
+
INLINE cnum tag(val obj) { return ((cnum) obj) & TAG_MASK; }
INLINE int is_ptr(val obj) { return obj && tag(obj) == TAG_PTR; }
INLINE int is_num(val obj) { return tag(obj) == TAG_NUM; }
@@ -371,6 +393,7 @@ val equal(val left, val right);
mem_t *chk_malloc(size_t size);
mem_t *chk_calloc(size_t n, size_t size);
mem_t *chk_realloc(mem_t *, size_t size);
+int in_malloc_range(mem_t *);
wchar_t *chk_strdup(const wchar_t *str);
val cons(val car, val cdr);
val make_lazy_cons(val func);
@@ -650,7 +673,7 @@ INLINE val eq(val a, val b) { return ((a) == (b) ? t : nil); }
do { \
if (*PTAIL) \
PTAIL = tail(*PTAIL); \
- *PTAIL = cons(OBJ, nil); \
+ set(*PTAIL, cons(OBJ, nil)); \
PTAIL = cdr_l(*PTAIL); \
} while(0)
@@ -659,16 +682,16 @@ INLINE val eq(val a, val b) { return ((a) == (b) ? t : nil); }
if (*PTAIL) { \
PTAIL = tail(*PTAIL); \
} \
- *PTAIL = OBJ; \
+ set(*PTAIL, OBJ); \
} while (0)
#define list_collect_append(PTAIL, OBJ) \
do { \
if (*PTAIL) { \
- *PTAIL = copy_list(*PTAIL); \
+ set(*PTAIL, copy_list(*PTAIL)); \
PTAIL = tail(*PTAIL); \
} \
- *PTAIL = OBJ; \
+ set(*PTAIL, OBJ); \
} while (0)
#define cons_bind(CAR, CDR, CONS) \
diff --git a/match.c b/match.c
index 39a6980d..f90b8838 100644
--- a/match.c
+++ b/match.c
@@ -233,7 +233,7 @@ static val dest_set(val spec, val bindings, val pattern, val value)
sem_error(spec, lit("~s cannot be used as a variable"), pattern, nao);
if (!existing)
sem_error(spec, lit("cannot set unbound variable ~s"), pattern, nao);
- *cdr_l(existing) = value;
+ set(*cdr_l(existing), value);
} else if (consp(pattern)) {
if (first(pattern) == var_s) {
uw_throwf(query_error_s,
@@ -2828,7 +2828,7 @@ static val v_flatten(match_files_ctx *c)
val existing = assoc(sym, c->bindings);
if (existing)
- *cdr_l(existing) = flatten(cdr(existing));
+ set(*cdr_l(existing), flatten(cdr(existing)));
}
}
@@ -2987,7 +2987,7 @@ static val v_cat(match_files_ctx *c)
if (existing) {
val sep = if3(sep_form, txeval(specline, sep_form, c->bindings),
lit(" "));
- *cdr_l(existing) = cat_str(flatten(cdr(existing)), sep);
+ set(*cdr_l(existing), cat_str(flatten(cdr(existing)), sep));
} else {
sem_error(specline, lit("cat: unbound variable ~s"), sym, nao);
}
@@ -3057,9 +3057,9 @@ static val v_output(match_files_ctx *c)
if (existing) {
if (append) {
- *cdr_l(existing) = append2(flatten(cdr(existing)), list_out);
+ set(*cdr_l(existing), append2(flatten(cdr(existing)), list_out));
} else {
- *cdr_l(existing) = list_out;
+ set(*cdr_l(existing), list_out);
}
} else {
c->bindings = acons(into_var, list_out, c->bindings);
@@ -3351,7 +3351,7 @@ static val v_filter(match_files_ctx *c)
if (!existing)
sem_error(specline, lit("filter: variable ~a is unbound"), var, nao);
- *cdr_l(existing) = filter_string_tree(filter, cdr(existing));
+ set(*cdr_l(existing), filter_string_tree(filter, cdr(existing)));
}
uw_env_end;
diff --git a/stream.c b/stream.c
index 46148b7c..e877d200 100644
--- a/stream.c
+++ b/stream.c
@@ -355,7 +355,7 @@ static val string_in_get_line(val stream)
if (lt(pos, length_str(string))) {
val nlpos = find_char(string, pos, chr('\n'));
val result = sub_str(string, pos, nlpos);
- *cdr_l(pair) = nlpos ? plus(nlpos, one) : length_str(string);
+ set(*cdr_l(pair), nlpos ? plus(nlpos, one) : length_str(string));
return result;
}
@@ -369,7 +369,7 @@ static val string_in_get_char(val stream)
val pos = cdr(pair);
if (lt(pos, length_str(string))) {
- *cdr_l(pair) = plus(pos, one);
+ set(*cdr_l(pair), plus(pos, one));
return chr_str(string, pos);
}
@@ -584,8 +584,8 @@ static val strlist_out_put_string(val stream, val str)
strstream = make_string_output_stream();
}
- *car_l(cell) = lines;
- *cdr_l(cell) = strstream;
+ set(*car_l(cell), lines);
+ set(*cdr_l(cell), strstream);
return t;
}
@@ -602,8 +602,8 @@ static val strlist_out_put_char(val stream, val ch)
put_char(ch, strstream);
}
- *car_l(cell) = lines;
- *cdr_l(cell) = strstream;
+ set(*car_l(cell), lines);
+ set(*cdr_l(cell), strstream);
return t;
}
diff --git a/unwind.c b/unwind.c
index a51ef98f..e76ace02 100644
--- a/unwind.c
+++ b/unwind.c
@@ -386,7 +386,7 @@ val uw_register_subtype(val sub, val sup)
/* Make sub an immediate subtype of sup.
If sub already registered, we just repoint it. */
if (sub_entry) {
- *cdr_l(sub_entry) = sup_entry;
+ set(*cdr_l(sub_entry), sup_entry);
} else {
sub_entry = cons(sub, sup_entry);
exception_subtypes = cons(sub_entry, exception_subtypes);
@@ -403,7 +403,9 @@ void uw_continue(uw_frame_t *current, uw_frame_t *cont)
void uw_init(void)
{
- protect(&toplevel_env.ev.func_bindings, &exception_subtypes, (val *) 0);
+ protect(&toplevel_env.ev.func_bindings,
+ &toplevel_env.ev.match_context,
+ &exception_subtypes, (val *) 0);
exception_subtypes = cons(cons(t, nil), exception_subtypes);
uw_register_subtype(type_error_s, error_s);
uw_register_subtype(internal_error_s, error_s);