jcs / subtext / amendments / 236
uthread: Pre-allocate threads, put gaps between each stack

Add a verification function that checks that each gap hasn't been
written to.

jcs made amendment 236 over 2 years ago
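
In the diff below, uthread_init() reserves a fixed stack slot for every thread up front instead of carving space out at runtime: each thread's stack_loc is the high end of its slot (the 68k stack grows downward, so that is where A7 starts), and stack_gap points at the lowest STACK_GAP bytes of the slot, which get painted with STACK_GAP_FILL so uthread_verify() can later tell whether a stack grew down into its neighbor. The following is a minimal host-side sketch of just that layout arithmetic; the base address is made up and plain printf stands in for the Mac Toolbox calls used in the amendment.

#include <stdio.h>

#define NUM_UTHREADS    8
#define STACK_SIZE      (1024 * 3)
#define STACK_GAP       255

int
main(void)
{
        /* hypothetical stand-in for the aligned main_stack_gap address */
        unsigned long main_stack_gap = 0x260C00UL;
        unsigned long stacks_bottom, stack_loc, stack_gap;
        int i;

        /* everything below the main stack gap belongs to the uthreads */
        stacks_bottom = main_stack_gap - (STACK_SIZE * NUM_UTHREADS);

        for (i = 0; i < NUM_UTHREADS; i++) {
                /* same arithmetic as the pre-allocation loop in uthread_init() */
                stack_loc = stacks_bottom + (STACK_SIZE * (i + 1));
                stack_gap = stack_loc - STACK_SIZE + 1;

                printf("thread %d: SP starts at 0x%lx, gap 0x%lx-0x%lx\n",
                    i, stack_loc, stack_gap, stack_gap + STACK_GAP - 1);
        }

        return 0;
}

With this layout each gap sits at the bottom of its slot, just above the neighboring thread's stack top, so a thread that overruns its 3 KB slot has to write over the painted bytes before it can damage anyone else's stack.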
--- uthread.c	Sat Jul 16 21:42:21 2022
+++ uthread.c	Wed Aug  3 14:25:06 2022
@@ -23,21 +23,72 @@
 #include "uthread.h"
 #include "util.h"
 
-#define STACK_SIZE      (1024 * 5)
-#define CANARY          0xdeadf00d
-
 jmp_buf uthread_coord_env;
-static struct uthread uthreads[NUM_UTHREADS] = { 0 };
+static struct uthread uthreads[NUM_UTHREADS];
 struct uthread *uthread_current = NULL;
+static Ptr main_stack_gap = NULL;
+static char uthread_stack_gap_fill[STACK_GAP];
+unsigned long uthread_ApplLimit = 0;
 
 void uthread_begin(struct uthread *uthread);
 
+/*
+ * | A5 world          |
+ * |-------------------| 0x26234E  CurStackBase (stacks_top)
+ * | main stack        |           COORDINATOR_STACK_SIZE
+ * |-------------------| 0x260F4E
+ * | main stack gap    |           STACK_GAP
+ * |-------------------| 0x260C00  (main_stack_gap, upward)
+ * :  (other threads)  :
+ * |-------------------| 0x25DC00  ((STACK_SIZE - STACK_GAP) downward)
+ * | uthread 1 stack   |
+ * |                   |
+ * |-------------------|
+ * | uthread 1 gap     | 0x25D001  (STACK_GAP upward)
+ * |-------------------| 0x25D000  (downward)
+ * | uthread 0 stack   |
+ * |                   |
+ * |-------------------|
+ * | uthread 0 gap     | 0x25C401  (STACK_GAP upward)
+ * |-------------------| 0x25C400  ApplLimit (stacks_bottom)
+ * |                   |
+ * |       heap        |
+ * |                   |
+ * |-------------------| 0x16F5D8  ApplZone
+ */
 void
 uthread_init(void)
 {
-        SetApplLimit(GetApplLimit() - (STACK_SIZE * (NUM_UTHREADS + 1)));
+        unsigned long stacks_top, stacks_bottom;
+        short i;
+
+        stacks_top = (unsigned long)CurStackBase;
+        main_stack_gap = (Ptr)(stacks_top - COORDINATOR_STACK_SIZE -
+            STACK_GAP);
+        /* align uthread stacks */
+        main_stack_gap = (Ptr)((unsigned long)main_stack_gap -
+            ((unsigned long)main_stack_gap % 1024));
+
+        stacks_bottom = (unsigned long)main_stack_gap -
+            (STACK_SIZE * NUM_UTHREADS);
+
+        uthread_ApplLimit = stacks_bottom; // - (1024 * 5);
+        SetApplLimit((Ptr)uthread_ApplLimit);
         if (MemError())
-                panic("Failed to SetApplLimit");
+                panic("Failed to SetApplLimit to %lu", uthread_ApplLimit);
+
+        /* color in our stack gap */
+        memset(main_stack_gap, STACK_GAP_FILL, STACK_GAP);
+        memset(uthread_stack_gap_fill, STACK_GAP_FILL, STACK_GAP);
+
+        for (i = 0; i < NUM_UTHREADS; i++) {
+                uthreads[i].id = i;
+                uthreads[i].state = UTHREAD_STATE_DEAD;
+                uthreads[i].stack_loc = stacks_bottom + (STACK_SIZE * (i + 1));
+                uthreads[i].stack_gap = uthreads[i].stack_loc - STACK_SIZE + 1;
+        }
+
+        uthread_verify();
 }
 
 struct uthread *
@@ -48,11 +99,14 @@ uthread_add(void *func, void *arg)
         for (i = 0; i < NUM_UTHREADS; i++) {
                 if (uthreads[i].state != UTHREAD_STATE_DEAD)
                         continue;
-
-                uthreads[i].id = i;
+
                 uthreads[i].func = func;
                 uthreads[i].arg = arg;
                 uthreads[i].state = UTHREAD_STATE_SETUP;
+
+                /* color in stack gap */
+                memset((Ptr)uthreads[i].stack_gap, STACK_GAP_FILL, STACK_GAP);
+
                 return &uthreads[i];
         }
 
@@ -63,19 +117,33 @@ void
 uthread_yield(void)
 {
         volatile long magic = CANARY;
-
+        register unsigned long _sp = 0;
+
+        asm {
+                move.l  a7, _sp
+        };
+
         if (uthread_current == NULL)
                 panic("uthread_yield not from a thread!");
-
+
+        if ((uthread_current->stack_loc - _sp) > (STACK_SIZE / 2))
+                panic("thread %d stack growing too large "
+                    "[SP=0x%lx] [stack=0x%lx] [gap=0x%lx-0x%lx]",
+                    uthread_current->id, _sp, uthread_current->stack_loc,
+                    uthread_current->stack_gap,
+                    uthread_current->stack_gap + STACK_GAP);
+
         if (uthread_current->state != UTHREAD_STATE_SLEEPING)
                 uthread_current->state = UTHREAD_STATE_YIELDING;
-
+
         if (setjmp(uthread_current->env) == 0)
                 longjmp(uthread_coord_env, UTHREAD_SETJMP_YIELDED);
 
         /* will not return */
         if (magic != CANARY)
-                panic("canary clobbered!");
+                panic("uthread stack canary dead!");
+
+        uthread_verify();
 }
 
 void
@@ -92,16 +160,16 @@ uthread_msleep(unsigned long millis)
 void
 uthread_begin(struct uthread *uthread)
 {
-        register long stack_offset = STACK_SIZE * (uthread->id + 2);
+        register unsigned long stack_loc = uthread->stack_loc;
 
         asm {
-                movea.l stack_offset, a0
-                sub.w   a0, a7
+                move.l  stack_loc, a7
         };
-
+
         uthread->func(uthread, uthread->arg);
-
-        /* uthread variable is probably trashed at this point */
+
+        uthread_verify();
+        /* uthread variable is probably trashed at this point */
         if (uthread_current->state == UTHREAD_STATE_REPEAT)
                 uthread_current->state = UTHREAD_STATE_SETUP;
         else
@@ -113,7 +181,8 @@ uthread_begin(struct uthread *uthread)
 void
 uthread_coordinate(void)
 {
-        short i;
+        short i, n;
+        unsigned char *gap;
 
         for (i = 0; i < NUM_UTHREADS; i++) {
                 if (uthreads[i].state == UTHREAD_STATE_DEAD)
@@ -146,4 +215,25 @@ uthread_coordinate(void)
         }
 
         uthread_current = NULL;
+        uthread_verify();
+}
+
+void
+uthread_verify(void)
+{
+        short i;
+        unsigned char *gap;
+
+        gap = (unsigned char *)main_stack_gap;
+        if (memcmp(gap, uthread_stack_gap_fill, STACK_GAP) != 0)
+                panic("coordinator spilled into stack gap");
+
+        for (i = 0; i < NUM_UTHREADS; i++) {
+                if (uthreads[i].state == UTHREAD_STATE_DEAD)
+                        continue;
+
+                gap = (unsigned char *)(uthreads[i].stack_gap);
+                if (memcmp(gap, uthread_stack_gap_fill, STACK_GAP) != 0)
+                        panic("thread %d spilled into stack gap", i);
+        }
 }
--- uthread.h	Sat Jul 16 21:42:35 2022
+++ uthread.h	Wed Aug  3 14:25:24 2022
@@ -19,7 +19,12 @@
 #ifndef __UTHREAD_H__
 #define __UTHREAD_H__
 
-#define NUM_UTHREADS    5
+#define NUM_UTHREADS    8
+#define COORDINATOR_STACK_SIZE  (1024 * 10)
+#define STACK_SIZE      (1024 * 3)
+#define STACK_GAP       255
+#define STACK_GAP_FILL  0xff
+#define CANARY          0xdeadf00d
 
 enum {
         UTHREAD_STATE_DEAD,
@@ -39,16 +44,21 @@ struct uthread {
         short state;
         short id;
         jmp_buf env;
-        char *stack;
         void (*func)(struct uthread *, void *);
         void *arg;
         unsigned long sleeping_until;
+        unsigned long stack_loc;
+        unsigned long stack_gap;
 };
 
+extern struct uthread *uthread_current;
+extern unsigned long uthread_ApplLimit;
+
 void uthread_init(void);
 struct uthread *uthread_add(void *func, void *arg);
 void uthread_yield(void);
 void uthread_coordinate(void);
 void uthread_msleep(unsigned long millis);
+void uthread_verify(void);
 
 #endif /* __UTHREAD_H__ */