implemented automatic generational processing based on threshold in garbage collection

hyung-hwan 2020-03-28 06:45:36 +00:00
parent 752b1332c9
commit 7d70005984
4 changed files with 122 additions and 84 deletions
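
This change replaces the old per-generation counters nv[] and nc[] with an ncolls[]/threshold[] pair and wires them into the allocator: each value allocation bumps ncolls[0], and once it reaches threshold[0] the allocator calls gc_collect_garbage_auto(), which collects the oldest generation whose own counter has reached its threshold and promotes the survivors one generation up. If an allocation still fails afterwards, one full collection is attempted before giving up. The public entry point hawk_rtx_gc() gains a generation argument, with HAWK_RTX_GC_GEN_AUTO and HAWK_RTX_GC_GEN_FULL as convenience values.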

===== file 1 of 4 =====

@@ -405,9 +405,17 @@ struct hawk_rtx_t
 	struct
 	{
-		hawk_gch_t g[HAWK_GC_NUM_GENS]; /* lists of values under gc management */
-		hawk_oow_t nv[HAWK_GC_NUM_GENS]; /* number of values in each list */
-		hawk_oow_t nc[HAWK_GC_NUM_GENS]; /* number of collections performed for each generation */
+		/* lists of values under gc management */
+		hawk_gch_t g[HAWK_GC_NUM_GENS];
+
+		/*
+		 * ncolls[0] - number of allocation attempts since the last gc
+		 * ncolls[N] - number of collections performed for generation N - 1.
+		 */
+		hawk_oow_t ncolls[HAWK_GC_NUM_GENS + 1];
+
+		/* threshold to trigger generational collection. */
+		hawk_oow_t threshold[HAWK_GC_NUM_GENS];
 	} gc;
 	hawk_nde_blk_t* active_block;
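
The ncolls[] array deliberately has one more element than g[] and threshold[]: slot 0 counts allocation attempts, and the collection count for generation N lives at index N + 1. A minimal sketch of the bookkeeping this layout enables (plain C, not hawk code; a 3-generation layout is assumed for illustration):

	#include <stddef.h>

	/* ncolls[0] - allocation attempts since the last collection
	 * ncolls[1] - collections performed for generation 0
	 * ncolls[2] - collections performed for generation 1
	 * ncolls[3] - collections performed for generation 2 */
	static void after_collection (size_t ncolls[4], int gen)
	{
		ncolls[gen + 1]++; /* one more collection done for generation gen */
		ncolls[gen] = 0;   /* reset the counter that triggered this collection */
		ncolls[0] = 0;     /* allocations since the last gc; redundant when gen == 0 */
	}

Shifting the collection counts up by one index keeps this bookkeeping branch-free for every generation, including generation 0.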

===== file 2 of 4 =====

@@ -3005,8 +3005,11 @@ HAWK_EXPORT void hawk_rtx_refdownval_nofree (
 );
 
+#define HAWK_RTX_GC_GEN_FULL (HAWK_TYPE_MAX(int))
+#define HAWK_RTX_GC_GEN_AUTO (-1)
+
 HAWK_EXPORT void hawk_rtx_gc (
-	hawk_rtx_t* rtx
+	hawk_rtx_t* rtx,
+	int gen
 );
 
 /**

===== file 3 of 4 =====

@@ -1039,10 +1039,13 @@ static int init_rtx (hawk_rtx_t* rtx, hawk_t* awk, hawk_rio_cbs_t* rio)
 		rtx->gc.g[i].gc_prev = &rtx->gc.g[i];
 
 		/* initialize some counters */
-		rtx->gc.nv[i] = 0;
-		rtx->gc.nc[i] = 0;
+		rtx->gc.ncolls[i] = 0;
+		rtx->gc.threshold[i] = (HAWK_COUNTOF(rtx->gc.g) - i) * 3;
+		if (i == 0 && rtx->gc.threshold[i] < 100) rtx->gc.threshold[i] = 100; /* minimum threshold for gen 0 is 100 */
 	}
+	rtx->gc.ncolls[i] = 0; /* ncolls[] is larger than the other arrays by 1 in size */
 
 	rtx->inrec.buf_pos = 0;
 	rtx->inrec.buf_len = 0;
 	rtx->inrec.flds = HAWK_NULL;
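
With the formula above, and assuming HAWK_GC_NUM_GENS is 3 (an illustrative value, not one confirmed by this diff), the raw thresholds come out as (3 - i) * 3 = 9, 6, 3; the generation-0 value is then raised to the minimum of 100, giving {100, 6, 3}. In other words: a collection is considered every 100 allocation attempts, generation 1 is collected after 6 generation-0 collections, and generation 2 after 3 generation-1 collections.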
@@ -1261,7 +1264,7 @@ static void fini_rtx (hawk_rtx_t* rtx, int fini_globals)
 #if defined(HAWK_ENABLE_GC)
 	/* collect garbage after having released global variables and named global variables */
-	hawk_rtx_gc (rtx);
+	hawk_rtx_gc (rtx, HAWK_RTX_GC_GEN_FULL);
 #endif
 
 	/* destroy values in free list */
@@ -1682,7 +1685,7 @@ hawk_val_t* hawk_rtx_execwithucstrarr (hawk_rtx_t* rtx, const hawk_uch_t* args[]
 #if defined(HAWK_ENABLE_GC)
 	/* i assume this function is a usual hawk program starter.
 	 * call garbage collection after a whole program finishes */
-	hawk_rtx_gc (rtx);
+	hawk_rtx_gc (rtx, HAWK_RTX_GC_GEN_FULL);
 #endif
 	return v;
@@ -1699,7 +1702,7 @@ hawk_val_t* hawk_rtx_execwithbcstrarr (hawk_rtx_t* rtx, const hawk_bch_t* args[]
 #if defined(HAWK_ENABLE_GC)
 	/* i assume this function is a usual hawk program starter.
 	 * call garbage collection after a whole program finishes */
-	hawk_rtx_gc (rtx);
+	hawk_rtx_gc (rtx, HAWK_RTX_GC_GEN_FULL);
 #endif
 	return v;

===== file 4 of 4 =====

@@ -111,16 +111,6 @@ BEGIN {
 #define GCH_MOVED HAWK_TYPE_MAX(hawk_uintptr_t)
 #define GCH_UNREACHABLE (GCH_MOVED - 1)
 
-static hawk_val_t* gc_calloc (hawk_rtx_t* rtx, hawk_oow_t size)
-{
-	hawk_gch_t* gch;
-
-	gch = (hawk_gch_t*)hawk_rtx_callocmem(rtx, HAWK_SIZEOF(*gch) + size);
-	if (HAWK_UNLIKELY(!gch)) return HAWK_NULL;
-
-	return hawk_gch_to_val(gch);
-}
-
 static HAWK_INLINE void gc_chain_gch (hawk_gch_t* list, hawk_gch_t* gch)
 {
 	gch->gc_next = list;
@@ -204,6 +194,7 @@ static void gc_trace_refs (hawk_gch_t* list)
 static void gc_dump_refs (hawk_rtx_t* rtx, hawk_gch_t* list)
 {
 	hawk_gch_t* gch;
+	hawk_oow_t count = 0;
 
 	gch = list->gc_next;
 	while (gch != list)
@@ -211,7 +202,7 @@ static void gc_dump_refs (hawk_rtx_t* rtx, hawk_gch_t* list)
 		hawk_logbfmt (hawk_rtx_gethawk(rtx), HAWK_LOG_STDERR, "[GC] GCH %p gc_refs %d\n", gch, (int)gch->gc_refs);
 		gch = gch->gc_next;
 	}
-	hawk_logbfmt (hawk_rtx_gethawk(rtx), HAWK_LOG_STDERR, "[GC] all_count => %d\n", (int)rtx->gc.nv[0]);
+	hawk_logbfmt (hawk_rtx_gethawk(rtx), HAWK_LOG_STDERR, "[GC] dumped %ju values\n", count);
 }
 
 static void gc_move_reachables (hawk_gch_t* list, hawk_gch_t* reachable_list)
@@ -264,6 +255,11 @@ static void gc_move_reachables (hawk_gch_t* list, hawk_gch_t* reachable_list)
 	}
 }
 
+static HAWK_INLINE void gc_free_val (hawk_rtx_t* rtx, hawk_val_t* v)
+{
+	hawk_rtx_freemem (rtx, hawk_val_to_gch(v));
+}
+
 static void gc_free_unreachables (hawk_rtx_t* rtx, hawk_gch_t* list)
 {
 	hawk_gch_t* gch;
@@ -299,47 +295,59 @@ static void gc_free_unreachables (hawk_rtx_t* rtx, hawk_gch_t* list)
 		hawk_logbfmt (hawk_rtx_gethawk(rtx), HAWK_LOG_STDERR, "[GC] FREEING UNREACHABLE GCH %p gc_refs %zu v_refs %zu\n", gch, gch->gc_refs, hawk_gch_to_val(gch)->v_refs);
 #endif
 		/* do what hawk_rtx_freeval() would do without HAWK_RTX_FREEVAL_GC_PRESERVE */
-		rtx->gc.nv[0]--;
 		gc_unchain_gch (gch);
-		hawk_rtx_freemem (rtx, gch);
+		gc_free_val (rtx, hawk_gch_to_val(gch));
 	}
 }
 
-static void gc_collect_garbage_in_generation (hawk_rtx_t* rtx, int gen)
+static HAWK_INLINE void gc_collect_garbage_in_generation (hawk_rtx_t* rtx, int gen)
 {
 	hawk_oow_t i, newgen;
-	hawk_gch_t reachable;
 
 #if defined(DEBUG_GC)
-	hawk_logbfmt (hawk_rtx_gethawk(rtx), HAWK_LOG_STDERR, "[GC] **started**\n");
+	hawk_logbfmt (hawk_rtx_gethawk(rtx), HAWK_LOG_STDERR, "[GC] **started - gen %d**\n", gen);
 #endif
 
 	newgen = (gen < HAWK_COUNTOF(rtx->gc.g) - 1)? (gen + 1): gen;
 
-	for (i = 0; i < gen; i++) gc_move_all_gchs (&rtx->gc.g[i], &rtx->gc.g[gen]);
-
-	gc_trace_refs (&rtx->gc.g[gen]);
-
-	reachable.gc_prev = &reachable;
-	reachable.gc_next = &reachable;
-	gc_move_reachables (&rtx->gc.g[gen], &reachable);
-
-	/* only unreachables are left in rtx->gc.g[0] */
-#if defined(DEBUG_GC)
-	/*gc_dump_refs (rtx, &rtx->gc.g[0]);*/
-#endif
-	gc_free_unreachables (rtx, &rtx->gc.g[gen]);
-	HAWK_ASSERT (rtx->gc.g[gen].gc_next == &rtx->gc.g[gen]);
-
-	/* move all reachables back to the main list */
-	gc_move_all_gchs (&reachable, &rtx->gc.g[newgen]);
+	for (i = 0; i < gen; i++)
+	{
+		gc_move_all_gchs (&rtx->gc.g[i], &rtx->gc.g[gen]);
+	}
+
+	if (rtx->gc.g[gen].gc_next != &rtx->gc.g[gen])
+	{
+		hawk_gch_t reachable;
+
+		gc_trace_refs (&rtx->gc.g[gen]);
+
+		reachable.gc_prev = &reachable;
+		reachable.gc_next = &reachable;
+		gc_move_reachables (&rtx->gc.g[gen], &reachable);
+
+		/* only unreachables are left in rtx->gc.g[0] */
+#if defined(DEBUG_GC)
+		/*gc_dump_refs (rtx, &rtx->gc.g[0]);*/
+#endif
+		gc_free_unreachables (rtx, &rtx->gc.g[gen]);
+		HAWK_ASSERT (rtx->gc.g[gen].gc_next == &rtx->gc.g[gen]);
+
+		/* move all reachables back to the main list */
+		gc_move_all_gchs (&reachable, &rtx->gc.g[newgen]);
+	}
+
+	/* [NOTE] ncolls[] is larger than the other arrays by 1 in size.
+	 * i store the number of collections for gen 0 in ncolls[1]
+	 * so that i can avoid some comparisons here. */
+	rtx->gc.ncolls[gen + 1]++; /* number of collections done for gen */
+	rtx->gc.ncolls[gen] = 0; /* reset the counter of the previous generation */
+	rtx->gc.ncolls[0] = 0; /* reset the number of allocations since the last gc. redundant if gen is 0. */
 
 #if defined(DEBUG_GC)
 	hawk_logbfmt (hawk_rtx_gethawk(rtx), HAWK_LOG_STDERR, "[GC] **ended**\n");
 #endif
 }
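
Collecting generation gen therefore first merges every younger list into it, traces and partitions that single list, frees the unreachables and promotes the survivors to newgen. Both merge steps go through gc_move_all_gchs(), whose body is not part of this diff; a plausible sketch of such a helper for the circular, sentinel-headed lists used here (an assumption, not the project's actual code):

	/* move every node of one circular list onto another, leaving the
	 * source empty; both lists are headed by a self-pointing sentinel */
	static void splice_all (hawk_gch_t* from, hawk_gch_t* to)
	{
		if (from->gc_next != from) /* source list not empty */
		{
			hawk_gch_t* first = from->gc_next;
			hawk_gch_t* last = from->gc_prev;

			/* link the whole chain right after the destination sentinel */
			last->gc_next = to->gc_next;
			to->gc_next->gc_prev = last;
			to->gc_next = first;
			first->gc_prev = to;

			/* restore the source to an empty ring */
			from->gc_next = from;
			from->gc_prev = from;
		}
	}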
-#if 0
-static void gc_collect_garbage (hawk_rtx_t* rtx)
+static HAWK_INLINE int gc_collect_garbage_auto (hawk_rtx_t* rtx)
 {
 	hawk_oow_t i;
 
@@ -347,52 +355,56 @@ static void gc_collect_garbage (hawk_rtx_t* rtx)
 	while (i > 1)
 	{
 		--i;
-		if (rtx->gc.nc[i - 1] > rtx->gc.t[i])
+		if (rtx->gc.ncolls[i] >= rtx->gc.threshold[i])
 		{
-			gc_collect_garbage_in_generation(rtx, i);
-			rtx->gc.nc[i]++;
-			if (i > 0) rtx->gc.nc[i - 1] = 0;
-			return;
+			gc_collect_garbage_in_generation (rtx, i);
+			return i;
 		}
 	}
 
-	gc_collect_garbage_in_generation(rtx, 0);
-	rtx->gc.nc[0]++;
+	gc_collect_garbage_in_generation (rtx, 0);
+	return 0;
 }
-#endif
-void hawk_rtx_gc (hawk_rtx_t* rtx)
+void hawk_rtx_gc (hawk_rtx_t* rtx, int gen)
 {
-#if 0
-	hawk_gch_t reachable;
-
-#if defined(DEBUG_GC)
-	hawk_logbfmt (hawk_rtx_gethawk(rtx), HAWK_LOG_STDERR, "[GC] **started**\n");
-#endif
-
-	gc_trace_refs (&rtx->gc.g[0]);
-
-	reachable.gc_prev = &reachable;
-	reachable.gc_next = &reachable;
-	gc_move_reachables (&rtx->gc.g[0], &reachable);
-
-	/* only unreachables are left in rtx->gc.g[0] */
-#if defined(DEBUG_GC)
-	/*gc_dump_refs (rtx, &rtx->gc.g[0]);*/
-#endif
-	gc_free_unreachables (rtx, &rtx->gc.g[0]);
-	HAWK_ASSERT (rtx->gc.g[0].gc_next == &rtx->gc.g[0]);
-
-	/* move all reachables back to the main list */
-	gc_move_all_gchs (&reachable, &rtx->gc.g[0]);
-
-#if defined(DEBUG_GC)
-	hawk_logbfmt (hawk_rtx_gethawk(rtx), HAWK_LOG_STDERR, "[GC] **ended**\n");
-#endif
-#else
-	gc_collect_garbage_in_generation (rtx, HAWK_COUNTOF(rtx->gc.g) - 1); /* full garbage collection */
-#endif
+	if (gen < 0)
+	{
+		gc_collect_garbage_auto (rtx);
+	}
+	else
+	{
+		if (gen >= HAWK_COUNTOF(rtx->gc.g)) gen = HAWK_COUNTOF(rtx->gc.g) - 1;
+		gc_collect_garbage_in_generation (rtx, gen);
+	}
+}
+
+static HAWK_INLINE hawk_val_t* gc_calloc_val (hawk_rtx_t* rtx, hawk_oow_t size)
+{
+	hawk_gch_t* gch;
+	int gc_gen = 0;
+
+	if (HAWK_UNLIKELY(rtx->gc.ncolls[0] >= rtx->gc.threshold[0]))
+	{
+		/* invoke generational garbage collection */
+		gc_gen = gc_collect_garbage_auto(rtx);
+	}
+
+	gch = (hawk_gch_t*)hawk_rtx_callocmem(rtx, HAWK_SIZEOF(*gch) + size);
+	if (HAWK_UNLIKELY(!gch))
+	{
+		if (gc_gen < HAWK_COUNTOF(rtx->gc.g) - 1)
+		{
+			/* perform a full gc if one has not been triggered at the beginning of this function */
+			hawk_rtx_gc (rtx, HAWK_COUNTOF(rtx->gc.g) - 1);
+		}
+		gch = (hawk_gch_t*)hawk_rtx_callocmem(rtx, HAWK_SIZEOF(*gch) + size);
+		if (HAWK_UNLIKELY(!gch)) return HAWK_NULL;
+	}
+
+	rtx->gc.ncolls[0]++; /* count the allocation attempt */
+	return hawk_gch_to_val(gch);
 }
 #endif
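
With the new gen parameter, callers choose between automatic, targeted and full collection. A hedged usage sketch (assuming the public declarations are available via <hawk.h> and that rtx was created elsewhere in the usual way):

	#include <hawk.h> /* assumed header exposing hawk_rtx_gc() and the new macros */

	void run_gc_examples (hawk_rtx_t* rtx)
	{
		hawk_rtx_gc (rtx, HAWK_RTX_GC_GEN_AUTO); /* -1: let the thresholds pick the generation */
		hawk_rtx_gc (rtx, 0);                    /* collect only the youngest generation */
		hawk_rtx_gc (rtx, HAWK_RTX_GC_GEN_FULL); /* clamped to the oldest generation: full collection */
	}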
@@ -742,7 +754,6 @@ hawk_val_t* hawk_rtx_makenstrvalwithbcs (hawk_rtx_t* rtx, const hawk_bcs_t* str)
 	return hawk_rtx_makenstrvalwithbchars(rtx, str->ptr, str->len);
 }
 
-
 /* --------------------------------------------------------------------- */
@@ -931,11 +942,14 @@ hawk_val_t* hawk_rtx_makemapval (hawk_rtx_t* rtx)
 		HAWK_MAP_HASHER_DEFAULT
 #endif
 	};
+#if defined(HAWK_ENABLE_GC)
+	int retried = 0;
+#endif
 	hawk_val_map_t* val;
 
 #if defined(HAWK_ENABLE_GC)
-hawk_rtx_gc(rtx);
-	val = (hawk_val_map_t*)gc_calloc(rtx, HAWK_SIZEOF(hawk_val_map_t) + HAWK_SIZEOF(hawk_map_t) + HAWK_SIZEOF(rtx));
+retry:
+	val = (hawk_val_map_t*)gc_calloc_val(rtx, HAWK_SIZEOF(hawk_val_map_t) + HAWK_SIZEOF(hawk_map_t) + HAWK_SIZEOF(rtx));
 #else
 	val = (hawk_val_map_t*)hawk_rtx_callocmem(rtx, HAWK_SIZEOF(hawk_val_map_t) + HAWK_SIZEOF(hawk_map_t) + HAWK_SIZEOF(rtx));
 #endif
@@ -948,9 +962,21 @@ hawk_rtx_gc(rtx);
 	val->v_gc = 0;
 	val->map = (hawk_map_t*)(val + 1);
 
-	if (hawk_map_init(val->map, hawk_rtx_getgem(rtx), 256, 70, HAWK_SIZEOF(hawk_ooch_t), 1) <= -1)
+	if (HAWK_UNLIKELY(hawk_map_init(val->map, hawk_rtx_getgem(rtx), 256, 70, HAWK_SIZEOF(hawk_ooch_t), 1) <= -1))
 	{
+#if defined(HAWK_ENABLE_GC)
+		gc_free_val (rtx, (hawk_val_t*)val);
+		if (HAWK_LIKELY(!retried))
+		{
+			/* this map involves a non-gc allocation, which happens outside gc_calloc_val().
+			 * reattempt the allocation after a full gc, like gc_calloc_val() does */
+			hawk_rtx_gc (rtx, HAWK_COUNTOF(rtx->gc.g) - 1);
+			retried = 1;
+			goto retry;
+		}
+#else
 		hawk_rtx_freemem (rtx, val);
+#endif
 		return HAWK_NULL;
 	}
 
 	*(hawk_rtx_t**)hawk_map_getxtn(val->map) = rtx;
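
The retried flag applies the same allocate, full-collect, retry-once policy as gc_calloc_val(), but to the non-gc allocation performed inside hawk_map_init(): on the first failure the partially built value is released, a full collection runs, and construction restarts via goto retry; a second failure returns HAWK_NULL.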
@@ -958,7 +984,6 @@ hawk_rtx_gc(rtx);
 #if defined(HAWK_ENABLE_GC)
 	gc_chain_val (&rtx->gc.g[0], (hawk_val_t*)val);
-	rtx->gc.nv[0]++;
 	val->v_gc = 1;
 #if defined(DEBUG_GC)
 	hawk_logbfmt (hawk_rtx_gethawk(rtx), HAWK_LOG_STDERR, "[GC] MADE GCH %p VAL %p\n", hawk_val_to_gch(val), val);
@@ -1282,9 +1307,8 @@ void hawk_rtx_freeval (hawk_rtx_t* rtx, hawk_val_t* val, int flags)
 			hawk_map_fini (((hawk_val_map_t*)val)->map);
 			if (!(flags & HAWK_RTX_FREEVAL_GC_PRESERVE))
 			{
-				rtx->gc.nv[0]--;
 				gc_unchain_val (val);
-				hawk_rtx_freemem (rtx, hawk_val_to_gch(val));
+				gc_free_val (rtx, val);
 			}
 #else
 			hawk_map_fini (((hawk_val_map_t*)val)->map);