promotional, or sales literature without prior written consent from
MIT in each case. */
-/* $Header: /Users/cph/tmp/foo/mit-scheme/mit-scheme/v7/src/microcode/fasload.c,v 9.37 1988/08/15 20:46:15 cph Exp $
+/* $Header: /Users/cph/tmp/foo/mit-scheme/mit-scheme/v7/src/microcode/fasload.c,v 9.38 1988/09/29 04:57:52 jinx Exp $
The "fast loader" which reads in and relocates binary files and then
interns symbols. It is called with one argument: the (character
return;
}
\f
+/* Compare a dumped file's primitive renumber TABLE (of LENGTH entries)
+   against this microcode's own primitive numbering.  Returns true iff
+   every entry already agrees, in which case the caller can skip
+   renumbering primitive objects.  */
+
+Boolean
+check_primitive_numbers(table, length)
+  fast Pointer *table;
+  fast long length;
+{
+  fast long count, top;
+
+  top = NUMBER_OF_DEFINED_PRIMITIVES();
+  if (length < top)
+    top = length;
+
+  /* Entries for primitives implemented here must map to themselves. */
+  for (count = 0; count < top; count += 1)
+  {
+    if (table[count] != MAKE_PRIMITIVE_OBJECT(0, count))
+      return (false);
+  }
+  /* Is this really correct? Can't this screw up if there
+     were more implemented primitives in the dumping microcode
+     than in the loading microcode and they all fell after the
+     last implemented primitive in the loading microcode?
+   */
+  if (length == top)
+    return (true);
+  /* Entries past our last implemented primitive must be the
+     corresponding "unimplemented" objects. */
+  for (count = top; count < length; count += 1)
+  {
+    if (table[count] != MAKE_PRIMITIVE_OBJECT(count, top))
+      return (false);
+  }
+  return (true);
+}
+
+extern void get_band_parameters();
+
+/* Report the sizes (in words) of the heap and constant areas of the
+   band whose load was just aborted, via the HEAP_SIZE and CONST_SIZE
+   out-parameters.  Only meaningful immediately after such an abort,
+   while Heap_Count and Const_Count still describe the band. */
+
+void
+get_band_parameters(heap_size, const_size)
+  long *heap_size, *const_size;
+{
+  /* This assumes we have just aborted out of a band load. */
+
+  *heap_size = Heap_Count;
+  *const_size = Const_Count;
+  return;
+}
+\f
extern void Intern();
void
while (Next_Pointer < Stop_At)
{
- switch (Type_Code(*Next_Pointer))
+ switch (OBJECT_TYPE(*Next_Pointer))
{
case TC_MANIFEST_NM_VECTOR:
Next_Pointer += (1 + Get_Integer(*Next_Pointer));
break;
case TC_INTERNED_SYMBOL:
- if (Type_Code(Vector_Ref(*Next_Pointer, SYMBOL_GLOBAL_VALUE)) ==
+ if (OBJECT_TYPE(Vector_Ref(*Next_Pointer, SYMBOL_GLOBAL_VALUE)) ==
TC_BROKEN_HEART)
{
Pointer Old_Symbol;
Make_New_Pointer(TC_BROKEN_HEART, *Next_Pointer));
}
}
- else if (Type_Code(Vector_Ref(*Next_Pointer, SYMBOL_NAME)) ==
+ else if (OBJECT_TYPE(Vector_Ref(*Next_Pointer, SYMBOL_NAME)) ==
TC_BROKEN_HEART)
{
*Next_Pointer =
- Make_New_Pointer(Type_Code(*Next_Pointer),
+ Make_New_Pointer(OBJECT_TYPE(*Next_Pointer),
Fast_Vector_Ref(*Next_Pointer,
SYMBOL_NAME));
}
/*
Magic!
The relocation of compiled code entry points depends on the fact
- that fasdump never dumps a constant section.
+ that fasdump never dumps the compiler utilities vector (which
+ contains entry points used by compiled code to invoke microcode
+ provided utilities, like return_to_interpreter).
If the file is not a band, any pointers into constant space are
pointers into the compiler utilities vector. const_relocation is
Primitive_Table_Length,
from_band_load);
- if (Reloc_Debug)
+ if ((!from_band_load) ||
+ (heap_relocation != ((relocation_type) 0)) ||
+ (const_relocation != ((relocation_type) 0)) ||
+ (stack_relocation != ((relocation_type) 0)) ||
+ (!check_primitive_numbers(load_renumber_table,
+ Primitive_Table_Length)))
{
- printf("heap_relocation = %d = %x; const_relocation = %d = %x\n",
- heap_relocation, heap_relocation,
- const_relocation, const_relocation);
- }
+ /* We need to relocate. Oh well. */
+ if (Reloc_Debug)
+ {
+ printf("heap_relocation = %d = %x; const_relocation = %d = %x\n",
+ heap_relocation, heap_relocation,
+ const_relocation, const_relocation);
+ }
- /*
- Relocate the new data.
+ /*
+ Relocate the new data.
- There are no pointers in the primitive table, thus
- there is no need to relocate it.
- */
+ There are no pointers in the primitive table, thus
+ there is no need to relocate it.
+ */
- Relocate_Block(Orig_Heap, primitive_table);
- Relocate_Block(Orig_Constant, Free_Constant);
+ Relocate_Block(Orig_Heap, primitive_table);
+ Relocate_Block(Orig_Constant, Free_Constant);
+ }
\f
#ifdef BYTE_INVERSION
Finish_String_Inversion();
however, be any file which can be loaded with BINARY-FASLOAD.
*/
+/* Default band-load hooks.  A machine-dependent configuration file may
+   define start_band_load / end_band_load before this point to wrap a
+   band load with extra behavior; otherwise these defaults apply. */
+
+#ifndef start_band_load
+#define start_band_load()
+#endif
+
+/* Default end_band_load: on success (or when the interpreter is dying
+   anyway) close every open file channel, since the restored band knows
+   nothing about the channels opened by the previous world. */
+
+#ifndef end_band_load
+#define end_band_load(success, dying) \
+{ \
+  if (success || dying) \
+  { \
+    extern Boolean OS_file_close(); \
+    int i; \
+ \
+    for (i = 0; i < FILE_CHANNELS; i++) \
+    { \
+      if (Channels[i] != NULL) \
+      { \
+	OS_file_close(Channels[i]); \
+	Channels[i] = NULL; \
+      } \
+    } \
+  } \
+}
+#endif
+
DEFINE_PRIMITIVE ("LOAD-BAND", Prim_band_load, 1, 1, 0)
{
extern char *malloc();
extern void compiler_reset();
extern Pointer compiler_utilities;
- jmp_buf swapped_buf, *saved_buf;
- Pointer *saved_free, *saved_free_constant, *saved_stack_pointer;
+ jmp_buf
+ swapped_buf,
+ *saved_buf;
+ Pointer
+ *saved_free,
+ *saved_memtop,
+ *saved_free_constant,
+ *saved_stack_pointer;
long temp, length;
- Pointer result;
+ Pointer result, cutl;
char *band_name;
Primitive_1_Arg();
-
+\f
saved_free = Free;
Free = Heap_Bottom;
+ saved_memtop = MemTop;
+ SET_MEMTOP(Heap_Top);
+
+ start_band_load();
+
saved_free_constant = Free_Constant;
Free_Constant = Constant_Space;
saved_stack_pointer = Stack_Pointer;
Stack_Pointer = Highest_Allocated_Address;
- result = read_file_start(Arg1);
- if (result != PRIM_DONE)
+ temp = read_file_start(Arg1);
+ if (temp != PRIM_DONE)
{
Free = saved_free;
+ SET_MEMTOP(saved_memtop);
Free_Constant = saved_free_constant;
Stack_Pointer = saved_stack_pointer;
+ end_band_load(false, false);
- if (result == PRIM_INTERRUPT)
+ if (temp == PRIM_INTERRUPT)
{
- Primitive_Interrupt();
+ Primitive_Error(ERR_FASL_FILE_TOO_BIG);
}
else
{
- Primitive_Error(result);
+ Primitive_Error(temp);
}
}
\f
temp = setjmp(swapped_buf);
if (temp != 0)
{
- extern char *Error_Names[], *Abort_Names[];
+ extern char
+ *Error_Names[],
+ *Abort_Names[];
if (temp > 0)
{
fprintf(stderr, "band-name = \"%s\".\n", band_name);
free(band_name);
}
+ end_band_load(false, true);
+ Back_To_Eval = saved_buf;
Microcode_Termination(TERM_DISK_RESTORE);
/*NOTREACHED*/
}
}
reload_band_name = band_name;
- History = Make_Dummy_History();
+  /* Reset implementation state parameters */
+
+ INITIALIZE_INTERRUPTS();
Initialize_Stack();
+ Set_Pure_Top();
+ cutl = Vector_Ref(result, 1);
+ if (cutl != NIL)
+ {
+ compiler_utilities = cutl;
+ compiler_reset(cutl);
+ }
+ else
+ {
+ compiler_initialize(true);
+ }
+ Restore_Fixed_Obj(NIL);
+ Fluid_Bindings = NIL;
+ Current_State_Point = NIL;
+
+ /* Setup initial program */
+
Store_Return(RC_END_OF_COMPUTATION);
Store_Expression(NIL);
Save_Cont();
+
Store_Expression(Vector_Ref(result, 0));
- compiler_utilities = Vector_Ref(result, 1);
- compiler_reset(compiler_utilities);
Store_Env(Make_Non_Pointer(GLOBAL_ENV, GO_TO_GLOBAL));
- Set_Pure_Top();
+
+ /* Clear various interpreter state parameters. */
+
+ Trapping = false;
+ Return_Hook_Address = NULL;
+ History = Make_Dummy_History();
+ Prev_Restore_History_Stacklet = NIL;
+ Prev_Restore_History_Offset = 0;
+
+ end_band_load(true, false);
Band_Load_Hook();
+
+ /* Return in a non-standard way. */
+
PRIMITIVE_ABORT(PRIM_DO_EXPRESSION);
/*NOTREACHED*/
}
Pointer Next;
Count = Get_Integer(Fast_Vector_Ref(String_Chain, STRING_HEADER));
- Count = 4*(Count-2)+Type_Code(String_Chain)-MAGIC_OFFSET;
+ Count = 4*(Count-2)+OBJECT_TYPE(String_Chain)-MAGIC_OFFSET;
if (Reloc_Debug)
{
printf("String at 0x%x: restoring length of %d.\n",
return;
}
- Code = Type_Code(Orig_Pointer[STRING_LENGTH]);
+ Code = OBJECT_TYPE(Orig_Pointer[STRING_LENGTH]);
if (Code == 0) /* Already reversed? */
{
long Count, old_size, new_size, i;
{
int C1, C2, C3, C4;
- C4 = Type_Code(*Pointer_Address) & 0xFF;
+ C4 = OBJECT_TYPE(*Pointer_Address) & 0xFF;
C3 = (((long) *Pointer_Address)>>16) & 0xFF;
C2 = (((long) *Pointer_Address)>>8) & 0xFF;
C1 = ((long) *Pointer_Address) & 0xFF;
promotional, or sales literature without prior written consent from
MIT in each case. */
-/* $Header: /Users/cph/tmp/foo/mit-scheme/mit-scheme/v7/src/microcode/Attic/fhooks.c,v 9.28 1988/08/15 20:46:39 cph Exp $
+/* $Header: /Users/cph/tmp/foo/mit-scheme/mit-scheme/v7/src/microcode/Attic/fhooks.c,v 9.29 1988/09/29 04:58:22 jinx Exp $
*
* This file contains hooks and handles for the new fluid bindings
* scheme for multiprocessors.
#include "lookup.h"
#include "locks.h"
\f
+/* (SET-FLUID-BINDINGS! NEW-BINDINGS)
+   Sets the microcode fluid-bindings variable. Returns the previous value.
+*/
+
+DEFINE_PRIMITIVE ("SET-FLUID-BINDINGS!", Prim_set_fluid_bindings, 1, 1, 0)
+{
+  Pointer Result;
+  Primitive_1_Arg();
+
+  /* NIL is the empty binding list; anything else must be a pair. */
+  if (Arg1 != NIL)
+    Arg_1_Type(TC_LIST);
+
+  Result = Fluid_Bindings;
+  Fluid_Bindings = Arg1;
+  PRIMITIVE_RETURN(Result);
+}
+
+/* (GET-FLUID-BINDINGS)
+   Gets the microcode fluid-bindings variable.
+*/
+
+DEFINE_PRIMITIVE ("GET-FLUID-BINDINGS", Prim_get_fluid_bindings, 0, 0, 0)
+{
+  Primitive_0_Args();
+
+  PRIMITIVE_RETURN(Fluid_Bindings);
+}
+
/* (WITH-SAVED-FLUID-BINDINGS THUNK)
Executes THUNK, then restores the previous fluid bindings.
*/
\f
/* Utilities for the primitives below. */
-Pointer
-*lookup_slot(env, var)
-{
- Pointer *cell, *hunk, value;
- long trap_kind;
-
- hunk = Get_Pointer(var);
- lookup(cell, env, hunk, repeat_slot_lookup);
-
- value = Fetch(cell[0]);
+extern Pointer *lookup_cell();
- if (Type_Code(value) != TC_REFERENCE_TRAP)
- {
- return cell;
- }
+#define lookup_slot(env, var) lookup_cell(Get_Pointer(var), env)
- get_trap_kind(trap_kind, value);
- switch(trap_kind)
- {
- case TRAP_DANGEROUS:
- case TRAP_UNBOUND_DANGEROUS:
- case TRAP_UNASSIGNED_DANGEROUS:
- case TRAP_FLUID_DANGEROUS:
- case TRAP_COMPILER_CACHED_DANGEROUS:
- return deep_lookup(env, hunk[VARIABLE_SYMBOL], hunk);
-
- case TRAP_COMPILER_CACHED:
- case TRAP_FLUID:
- case TRAP_UNBOUND:
- case TRAP_UNASSIGNED:
- return cell;
-
- default:
- Primitive_Error(ERR_ILLEGAL_REFERENCE_TRAP);
- }
-}
-\f
Pointer
new_fluid_binding(cell, value, force)
Pointer *cell;
Lock_Handle set_serializer;
Pointer new_trap_value;
long new_trap_kind, trap_kind;
+ Pointer saved_extension, saved_value;
+ saved_extension = NIL;
new_trap_kind = TRAP_FLUID;
setup_lock(set_serializer, cell);
trap = *cell;
new_trap_value = trap;
- if (Type_Code(trap) == TC_REFERENCE_TRAP)
+ if (OBJECT_TYPE(trap) == TC_REFERENCE_TRAP)
{
get_trap_kind(trap_kind, trap);
switch(trap_kind)
Vector_Set(trap,
TRAP_TAG,
Make_Unsigned_Fixnum(TRAP_FLUID | (trap_kind & 1)));
-
/* Fall through */
+
case TRAP_FLUID:
case TRAP_FLUID_DANGEROUS:
new_trap_kind = -1;
break;
-
+\f
case TRAP_UNBOUND:
case TRAP_UNBOUND_DANGEROUS:
if (!force)
Primitive_Error(ERR_UNBOUND_VARIABLE);
}
/* Fall through */
+
case TRAP_UNASSIGNED:
case TRAP_UNASSIGNED_DANGEROUS:
new_trap_kind = (TRAP_FLUID | (trap_kind & 1));
new_trap_value = UNASSIGNED_OBJECT;
break;
-\f
+
case TRAP_COMPILER_CACHED:
case TRAP_COMPILER_CACHED_DANGEROUS:
- cell = Nth_Vector_Loc(Fast_Vector_Ref(*cell, TRAP_EXTRA),
- TRAP_EXTENSION_CELL);
+ saved_extension = Fast_Vector_Ref(*cell, TRAP_EXTRA);
+ cell = Nth_Vector_Loc(saved_extension, TRAP_EXTENSION_CELL);
update_lock(set_serializer, cell);
+ saved_value = *cell;
+ if (OBJECT_TYPE(saved_value) == TC_REFERENCE_TRAP)
+ {
+ /* No need to recache uuo links, they must already be recached. */
+ saved_extension = NIL;
+ }
goto new_fluid_binding_restart;
default:
*Free++ = new_trap_value;
*cell = trap;
}
+\f
+ if (saved_extension != NIL)
+ {
+ extern long recache_uuo_links();
+ long value;
+
+ value = recache_uuo_links(saved_extension, saved_value);
+ if (value != PRIM_DONE)
+ {
+ remove_lock(set_serializer);
+ if (value == PRIM_INTERRUPT)
+ {
+ Primitive_Interrupt();
+ }
+ else
+ {
+ Primitive_Error(value);
+ }
+ }
+ }
remove_lock(set_serializer);
/* Fluid_Bindings is per processor private. */
Free[CONS_CDR] = value;
Free += 2;
- return NIL;
+ return (NIL);
}
\f
/* (ADD-FLUID-BINDING! ENVIRONMENT SYMBOL-OR-VARIABLE VALUE)
if (Arg1 != GLOBAL_ENV)
Arg_1_Type(TC_ENVIRONMENT);
- switch (Type_Code(Arg2))
+ switch (OBJECT_TYPE(Arg2))
{
- case TC_VARIABLE:
- cell = lookup_slot(Arg1, Arg2);
- break;
-
- case TC_INTERNED_SYMBOL:
- case TC_UNINTERNED_SYMBOL:
- cell = deep_lookup(Arg1, Arg2, fake_variable_object);
- break;
-
- default:
- Primitive_Error(ERR_ARG_2_WRONG_TYPE);
- }
+ /* The next two cases are a temporary fix since compiler doesn't
+ do scode-quote the same way that the interpreter does.
- PRIMITIVE_RETURN(new_fluid_binding(cell, Arg3, false));
-}
-\f
-/* (MAKE-FLUID-BINDING! ENVIRONMENT SYMBOL-OR-VARIABLE VALUE)
- Looks up symbol-or-variable in environment. If it has not been
- fluidized, fluidizes it. A fluid binding with the specified
- value is created in this interpreter's fluid bindings. Unlike
- ADD-FLUID-BINDING!, it is not an error to discover no binding
- for this variable; a fluid binding will be made anyway. This is
- simple in the global case, since there is always a value slot
- available in the symbol itself. If the last frame searched
- in the environment chain is closed (does not have a parent
- and does not allow search of the global environment), an AUX
- binding must be established in the last frame.
-*/
+ Ultimately we need to redesign deep fluid-let support anyway,
+ so this will go away.
+ */
-DEFINE_PRIMITIVE ("MAKE-FLUID-BINDING!", Prim_make_fluid_binding, 3, 3, 0)
-{
- extern Pointer *force_definition();
- Pointer *cell;
- Primitive_3_Args();
+ case TC_LIST:
+ cell = lookup_slot(Arg1, Fast_Vector_Ref(Arg2, CONS_CAR));
+ break;
- if (Arg1 != GLOBAL_ENV)
- Arg_1_Type(TC_ENVIRONMENT);
+ case TC_SCODE_QUOTE:
+ cell = lookup_slot(Arg1, Fast_Vector_Ref(Arg2, SCODE_QUOTE_OBJECT));
+ break;
- switch (Type_Code(Arg2))
- {
case TC_VARIABLE:
cell = lookup_slot(Arg1, Arg2);
break;
default:
Primitive_Error(ERR_ARG_2_WRONG_TYPE);
}
-\f
- if (cell == unbound_trap_object)
- {
- long message;
-
- /* This only happens when global is not allowed,
- only provided for completeness.
- */
-
- cell = force_definition(Arg1,
- ((Type_Code(Arg2) == TC_VARIABLE) ?
- Vector_Ref(Arg2, VARIABLE_SYMBOL) :
- Arg2)
- &message);
-
- if (message != PRIM_DONE)
- {
- if (message == PRIM_INTERRUPT)
- {
- Primitive_Interrupt();
- }
- else
- {
- Primitive_Error(message);
- }
- }
- }
- PRIMITIVE_RETURN(new_fluid_binding(cell, Arg3, true));
+ PRIMITIVE_RETURN(new_fluid_binding(cell, Arg3, false));
}
promotional, or sales literature without prior written consent from
MIT in each case. */
-/* $Header: /Users/cph/tmp/foo/mit-scheme/mit-scheme/v7/src/microcode/interp.c,v 9.45 1988/08/15 20:50:06 cph Exp $
+/* $Header: /Users/cph/tmp/foo/mit-scheme/mit-scheme/v7/src/microcode/interp.c,v 9.46 1988/09/29 04:58:42 jinx Exp $
*
* This file contains the heart of the Scheme Scode
* interpreter
break;
case RC_NORMAL_GC_DONE:
- End_GC_Hook();
+ Val = Fetch_Expression();
if (GC_Space_Needed < 0)
{
/* Paranoia */
Microcode_Termination(TERM_GC_OUT_OF_SPACE);
}
GC_Space_Needed = 0;
- Val = Fetch_Expression();
+ End_GC_Hook();
break;
\f
case RC_PCOMB1_APPLY:
promotional, or sales literature without prior written consent from
MIT in each case. */
-/* $Header: /Users/cph/tmp/foo/mit-scheme/mit-scheme/v7/src/microcode/Attic/locks.h,v 9.22 1988/08/15 20:51:13 cph Exp $
+/* $Header: /Users/cph/tmp/foo/mit-scheme/mit-scheme/v7/src/microcode/Attic/locks.h,v 9.23 1988/09/29 04:59:13 jinx Rel $
Contains everything needed to lock and unlock parts of
the heap, pure/constant space and the like.
- It also contains intercommunication stuff as well. */
+ It also contains intercommunication stuff as well.
+*/
-#define Lock_Handle long * /* Address of lock word */
+typedef long *Lock_Handle; /* Address of lock word */
#define CONTENTION_DELAY 10 /* For "slow" locks, back off */
#define Lock_Cell(Cell) NULL /* Start lock */
#define Unlock_Cell(Cell) /* End lock */
#define Do_Store_No_Lock(To, F) *(To) = F
#define Sleep(How_Long) { } /* Delay for locks, etc. */
-
+#define LOCK_FIRST(cell1, cell2) (cell1 < cell2)
promotional, or sales literature without prior written consent from
MIT in each case. */
-/* $Header: /Users/cph/tmp/foo/mit-scheme/mit-scheme/v7/src/microcode/lookprm.c,v 1.2 1988/08/15 20:51:21 cph Exp $
+/* $Header: /Users/cph/tmp/foo/mit-scheme/mit-scheme/v7/src/microcode/lookprm.c,v 1.3 1988/09/29 04:59:28 jinx Rel $
*
* This file contains environment manipulation primitives.
* It makes heavy use of procedures in lookup.c
CHECK_ARG(2, SYMBOL_P); \
} while (0)
-#define lookup_primitive_end(Result) \
+#define lookup_primitive_action(action) \
{ \
- if (Result == PRIM_DONE) \
- PRIMITIVE_RETURN(Val); \
- if (Result == PRIM_INTERRUPT) \
- signal_interrupt_from_primitive(); \
- signal_error_from_primitive(Result); \
+ long result; \
+ \
+ result = (action); \
+ if (result != PRIM_DONE) \
+ { \
+ if (result == PRIM_INTERRUPT) \
+ signal_interrupt_from_primitive(); \
+ else \
+ signal_error_from_primitive(result); \
+ } \
+}
+
+#define lookup_primitive_end(value, action) \
+{ \
+ lookup_primitive_action(action); \
+ PRIMITIVE_RETURN(value); \
}
#define standard_lookup_primitive(action) \
{ \
- long Result; \
- \
lookup_primitive_type_test(); \
- Result = action; \
- lookup_primitive_end(Result); \
+ lookup_primitive_end(Val, action); \
/*NOTREACHED*/ \
}
\f
extract_or_create_cache(frame, sym)
Pointer frame, sym;
{
+ extern Pointer compiler_cache_variable[];
extern long compiler_cache();
Pointer *cell, value;
long trap_kind, result;
- cell = deep_lookup(frame, sym, fake_variable_object);
+ cell = deep_lookup(frame, sym, compiler_cache_variable);
value = Fetch(cell[0]);
if (REFERENCE_TRAP_P(value))
{
break;
}
}
- result = compiler_cache(cell, sym, NIL, 0, TRAP_REFERENCES_LOOKUP);
+ result = compiler_cache(cell, frame, sym, NIL, 0,
+ TRAP_REFERENCES_LOOKUP, true);
if (result != PRIM_DONE)
{
if (result == PRIM_INTERRUPT)
*UNDEFINE*: If undefine is ever implemented, the code below may be
affected. It will have to be rethought.
- NOTE: The following code has NOT been parallelized. It needs thinking.
+ NOTE: The following procedure and extract_or_create_cache have NOT
+  been parallelized. They need thinking.
*/
DEFINE_PRIMITIVE ("ENVIRONMENT-LINK-NAME", Prim_environment_link_name, 3, 3, 0)
{
extern Pointer *scan_frame();
- extern long compiler_uncache();
Pointer target, source, sym;
- Pointer cache, *cell;
- long result;
+ Pointer cache, *cell, *value_cell;
PRIMITIVE_HEADER (3);
target = ARG_REF (1);
case TRAP_COMPILER_CACHED:
case TRAP_COMPILER_CACHED_DANGEROUS:
{
- long result;
-
if (Vector_Ref(Vector_Ref(value, TRAP_EXTRA), TRAP_EXTENSION_CELL) !=
UNBOUND_OBJECT)
+ {
/* It is bound */
+
signal_error_from_primitive(ERR_BAD_SET);
- result = compiler_uncache(cell, sym);
- if (result != PRIM_DONE)
- {
- if (result == PRIM_INTERRUPT)
- signal_interrupt_from_primitive();
- else
- signal_error_from_primitive(result);
}
+ lookup_primitive_action(compiler_uncache(cell, sym));
+ value_cell = Nth_Vector_Loc(cache, TRAP_EXTENSION_CELL);
+ lookup_primitive_action
+ (compiler_recache(shadowed_value_cell, value_cell, target,
+ sym, Fetch(value_cell[0]), false, true));
Vector_Set(value, TRAP_EXTRA, cache);
PRIMITIVE_RETURN(SHARP_T);
}
signal_error_from_primitive(ERR_ILLEGAL_REFERENCE_TRAP);
}
}
- else
\f
+ else
{
Pointer *trap;
if ((cell != ((Pointer *) NULL)) &&
(Fetch(cell[0]) != DANGEROUS_UNBOUND_OBJECT))
+ {
signal_error_from_primitive(ERR_BAD_SET);
+ }
/* Allocate new trap object. */
Free += 2;
trap[1] = cache;
- /* The Local_Set is done to uncache anything being shadowed. */
+ lookup_primitive_action(extend_frame(target, sym, NIL, target, false));
- result = Local_Set(target, sym, UNASSIGNED_OBJECT);
- if (result != PRIM_DONE)
- {
- if (result == PRIM_INTERRUPT)
- signal_interrupt_from_primitive();
- else
- signal_error_from_primitive(result);
- }
-
if (cell == ((Pointer *) NULL))
{
+ trap[0] = MAKE_UNSIGNED_FIXNUM(TRAP_COMPILER_CACHED);
cell = scan_frame(target, sym, fake_variable_object, 0, true);
if (cell == ((Pointer *) NULL))
signal_error_from_primitive(ERR_BAD_FRAME);
}
-
- switch(Fetch(cell[0]))
+ else
{
- case UNASSIGNED_OBJECT:
- trap[0] = MAKE_UNSIGNED_FIXNUM(TRAP_COMPILER_CACHED);
- break;
+ trap[0] = MAKE_UNSIGNED_FIXNUM(TRAP_COMPILER_CACHED_DANGEROUS);
+ }
- case DANGEROUS_UNASSIGNED_OBJECT:
- trap[0] = MAKE_UNSIGNED_FIXNUM(TRAP_COMPILER_CACHED_DANGEROUS);
- break;
+ if (Fetch(cell[0]) != DANGEROUS_UNBOUND_OBJECT)
+ signal_error_from_primitive(ERR_BAD_FRAME);
- default:
- /* What? */
- signal_error_from_primitive(ERR_BAD_FRAME);
- }
+ value_cell = Nth_Vector_Loc(cache, TRAP_EXTENSION_CELL);
+ lookup_primitive_action
+ (compiler_recache(shadowed_value_cell, value_cell, target,
+ sym, Fetch(value_cell[0]), false, true));
Store(cell[0], Make_Pointer(TC_REFERENCE_TRAP, trap));
PRIMITIVE_RETURN(SHARP_T);
}
promotional, or sales literature without prior written consent from
MIT in each case. */
-/* $Header: /Users/cph/tmp/foo/mit-scheme/mit-scheme/v7/src/microcode/lookup.c,v 9.40 1988/08/15 20:51:32 cph Exp $
+/* $Header: /Users/cph/tmp/foo/mit-scheme/mit-scheme/v7/src/microcode/lookup.c,v 9.41 1988/09/29 04:59:45 jinx Rel $
*
* This file contains symbol lookup and modification routines. See
* Hal Abelson for a paper describing and justifying the algorithm.
return (Nth_Vector_Loc(sym, SYMBOL_GLOBAL_VALUE));
}
\f
+/* Shallow lookup performed "out of line" by various procedures.
+ It takes care of invoking deep_lookup when necessary.
+ */
+
+extern Pointer *lookup_cell();
+
+/* Return the value cell for the variable reference HUNK in ENV.
+   Performs a shallow lookup first; if the cell holds a "dangerous"
+   reference trap, falls back to deep_lookup through the symbol.
+   Benign traps (fluid, unbound, unassigned, compiler-cached) return
+   the cell as found; an unrecognized trap kind yields
+   illegal_trap_object. */
+
+Pointer *
+lookup_cell(hunk, env)
+  Pointer *hunk, env;
+{
+  Pointer *cell, value;
+  long trap_kind;
+
+  lookup(cell, env, hunk, repeat_lookup_cell);
+
+  value = Fetch(cell[0]);
+
+  if (OBJECT_TYPE(value) != TC_REFERENCE_TRAP)
+  {
+    return (cell);
+  }
+
+  get_trap_kind(trap_kind, value);
+  switch(trap_kind)
+  {
+    /* Dangerous traps mean the binding may be shadowed; redo deeply. */
+    case TRAP_DANGEROUS:
+    case TRAP_UNBOUND_DANGEROUS:
+    case TRAP_UNASSIGNED_DANGEROUS:
+    case TRAP_FLUID_DANGEROUS:
+    case TRAP_COMPILER_CACHED_DANGEROUS:
+      return (deep_lookup(env, hunk[VARIABLE_SYMBOL], hunk));
+
+    case TRAP_COMPILER_CACHED:
+    case TRAP_FLUID:
+    case TRAP_UNBOUND:
+    case TRAP_UNASSIGNED:
+      return (cell);
+
+    default:
+      return (illegal_trap_object);
+  }
+}
+\f
/* Full lookup end code.
deep_lookup_end handles all the complicated and dangerous cases.
cell is the value cell (supposedly found by deep_lookup). Hunk is
case TRAP_UNASSIGNED_DANGEROUS:
return_value = ERR_UNASSIGNED_VARIABLE;
break;
-
+\f
case TRAP_DANGEROUS:
{
Pointer trap_value;
}
\f
/* Complete assignment finalization.
+
deep_assignment_end handles all dangerous cases, and busts compiled
code operator reference caches as appropriate. It is similar to
deep_lookup_end.
value is the new value for the variable.
force forces an assignment if the variable is unbound. This is
- used for redefinition in the global environment, and for Common
- Lisp style fluid binding, which creates a value cell if there was
- none.
+ used for redefinition in the global environment
Notes on multiprocessor locking:
affect an operation must acquire the same locks and in the same
order, thus if there is no interleaving of these operations, the
result will be correct.
+
+ Important:
+
+ A re-definition can take place before the lock is grabbed in this
+ code and we will be clobbering the wrong cell. To be paranoid we
+ should redo the lookup while we have the cell locked and confirm
+ that this is still valid, but this is hard to do here.
+ Alternatively the lock could be grabbed by the caller and passed as
+ an argument after confirming the correctness of the binding. A
+ third option (the one in place now) is not to worry about this,
+ saying that there is a race condition in the user code and that the
+ definition happened after this assignment. For more precise
+ sequencing, the user should synchronize her/his assignments and
+ definitions her/himself.
+
+ assignment_end suffers from this problem as well.
+
*/
\f
#define RESULT(value) \
update_lock(set_serializer,
Nth_Vector_Loc(saved_extension, TRAP_EXTENSION_CELL));
}
- return_value = recache_uuo_links(saved_extension, saved_value);
+ /* NOTE:
+ recache_uuo_links can take an arbitrary amount of time since
+ there may be an internal lock and the code may have to uncache
+ arbitrarily many links.
+ Deadlock should not occur since both locks are always acquired
+ in the same order.
+ */
+
+ return_value = recache_uuo_links(saved_extension, saved_value);
remove_lock(set_serializer);
if (return_value != PRIM_DONE)
/* This must be done after the assignment lock has been removed,
to avoid potential deadlock.
*/
+
if (uncompile_p)
{
/* The reference was dangerous, uncompile the variable. */
return (return_value);
}
-
+\f
#undef ABORT
#undef REDO
#undef RESULT
#undef UNCOMPILE
-\f
+
/* Simple assignment end.
assignment_end lets deep_assignment_end handle all the hairy cases.
It is similar to lookup_end, but there is some hair for
#define redefinition(cell, value) \
deep_assignment_end(cell, fake_variable_object, value, true)
+/* Install VALUE in CELL for a new definition.  When SHADOWED_P, the
+   binding is known to shadow another, so a full (dangerous-aware)
+   redefinition is performed.  Otherwise the cell is locked and, if it
+   still holds DANGEROUS_UNBOUND_OBJECT, the value is stored directly;
+   any other contents force the redefinition path.  Returns PRIM_DONE
+   or an error/interrupt code from redefinition. */
+
+long
+definition(cell, value, shadowed_p)
+  Pointer *cell, value;
+  Boolean shadowed_p;
+{
+  if (shadowed_p)
+    return (redefinition(cell, value));
+  else
+  {
+    Lock_Handle set_serializer;
+
+    setup_lock(set_serializer, cell);
+    if (*cell == DANGEROUS_UNBOUND_OBJECT)
+    {
+      *cell = value;
+      remove_lock(set_serializer);
+      return (PRIM_DONE);
+    }
+    else
+    {
+      /* Unfortunate fact of life: This binding will be dangerous
+         even if there was no need, but this is the only way to
+         guarantee consistent values.
+       */
+      remove_lock(set_serializer);
+      return (redefinition(cell, value));
+    }
+  }
+}
+\f
long
dangerize(cell, sym)
fast Pointer *cell;
*Free++ = *cell;
*cell = trap;
remove_lock(set_serializer);
- return (PRIM_DONE);
+ return (simple_uncache(cell, sym));
}
\f
get_trap_kind(temp, *cell);
case TRAP_UNASSIGNED_DANGEROUS:
case TRAP_DANGEROUS:
case TRAP_FLUID_DANGEROUS:
- temp = PRIM_DONE;
break;
case TRAP_COMPILER_CACHED:
case TRAP_COMPILER_CACHED_DANGEROUS:
{
- long compiler_uncache();
-
remove_lock(set_serializer);
return (compiler_uncache(cell, sym));
}
Do_Store_No_Lock
((Nth_Vector_Loc (*cell, TRAP_TAG)),
(Make_Unsigned_Fixnum (TRAP_FLUID_DANGEROUS)));
- temp = PRIM_DONE;
break;
case TRAP_UNBOUND:
*cell = DANGEROUS_UNBOUND_OBJECT;
- temp = PRIM_DONE;
break;
case TRAP_UNASSIGNED:
*cell = DANGEROUS_UNASSIGNED_OBJECT;
- temp = PRIM_DONE;
break;
default:
- temp = ERR_ILLEGAL_REFERENCE_TRAP;
- break;
+ remove_lock(set_serializer);
+ return (ERR_ILLEGAL_REFERENCE_TRAP);
}
remove_lock(set_serializer);
- return (temp);
+ return (simple_uncache(cell, sym));
}
\f
/* The core of the incremental definition mechanism.
+
It takes care of dangerizing any bindings being shadowed by this
- definition, extending the frames appropriately, and uncaching any
+ definition, extending the frames appropriately, and uncaching or
+ recaching (according to the DEFINITION_RECACHES_EAGERLY flag) any
compiled code reference caches which might be affected by the new
definition.
*/
long
-extend_frame(env, sym, value, original_frame_p)
- Pointer env, sym, value;
- Boolean original_frame_p;
+extend_frame(env, sym, value, original_frame, recache_p)
+ Pointer env, sym, value, original_frame;
+ Boolean recache_p;
{
Lock_Handle extension_serializer;
Pointer extension, the_procedure;
*/
if (OBJECT_DATUM(env) != GO_TO_GLOBAL)
{
- return (original_frame_p ? ERR_BAD_FRAME : PRIM_DONE);
+ return ((env == original_frame) ? ERR_BAD_FRAME : PRIM_DONE);
}
- else if (original_frame_p)
+ else if (env == original_frame)
{
return (redefinition(Nth_Vector_Loc(sym, SYMBOL_GLOBAL_VALUE),
value));
long offset;
offset = 1 + Vector_Length(formals) - count;
- if (original_frame_p)
+ if (env == original_frame)
{
return (redefinition(Nth_Vector_Loc(env, offset), value));
}
*/
if (Fetch(scan[0]) == DANGEROUS_UNBOUND_OBJECT)
{
- long compiler_uncache();
long temp;
temp =
sym,
fake_variable_object),
sym);
- if (temp != PRIM_DONE)
+
+ if ((temp != PRIM_DONE) || (env != original_frame))
{
return (temp);
}
+ return shadowing_recache(scan, env, sym, value, true);
}
- if (original_frame_p)
+ if (env == original_frame)
{
return (redefinition(scan, value));
}
temp =
extend_frame(Fast_Vector_Ref(extension, ENV_EXTENSION_PARENT_FRAME),
- sym, NIL, false);
+ sym, NIL, original_frame, recache_p);
if (temp != PRIM_DONE)
{
return (temp);
}
-\f
+
/* Proceed to extend the frame:
- If the frame is the one where the definition is occurring,
put the value in the new value cell.
remove_lock(extension_serializer);
goto redo_aux_lookup;
}
-
+\f
scan = Get_Pointer(extension);
if ((temp + (AUX_LIST_FIRST - 1)) == Get_Integer(scan[VECTOR_LENGTH]))
result = Make_Pointer(TC_LIST, Free);
*Free++ = sym;
- *Free++ = ((original_frame_p) ? value : DANGEROUS_UNBOUND_OBJECT);
+ *Free++ = DANGEROUS_UNBOUND_OBJECT;
scan[temp + AUX_LIST_FIRST] = result;
scan[AUX_LIST_COUNT] = Make_Local_Offset(temp + 1);
+
+ remove_lock(extension_serializer);
+
+ if ((env != original_frame) || (!recache_p))
+ return (PRIM_DONE);
+ else
+ return (shadowing_recache((Free - 1), env, sym, value, false));
}
- remove_lock(extension_serializer);
- return (PRIM_DONE);
}
}
\f
"\n;; Local_Set: defining %s.",
Scheme_String_To_C_String(Vector_Ref(sym, SYMBOL_NAME)));
}
- result = extend_frame(env, sym, value, true);
+ result = extend_frame(env, sym, value, env, true);
Val = sym;
return (result);
}
deep_lookup(previous, symbol, fake_variable_object);
}
\f
+/* Macros to allow multiprocessor interlocking in
+ compiler caching and recaching.
+
+ The defaults are NOPs, but can be overriden by machine dependent
+ include files or config.h
+ */
+
+#ifndef update_uuo_prolog
+#define update_uuo_prolog()
+#endif
+
+#ifndef update_uuo_epilog
+#define update_uuo_epilog()
+#endif
+
+#ifndef compiler_cache_prolog
+#define compiler_cache_prolog()
+#endif
+
+#ifndef compiler_cache_epilog
+#define compiler_cache_epilog()
+#endif
+
+#ifndef compiler_trap_prolog
+#define compiler_trap_prolog()
+#endif
+
+#ifndef compiler_trap_epilog
+#define compiler_trap_epilog()
+#endif
+
+#ifndef compiler_uncache_prolog
+#define compiler_uncache_prolog()
+#endif
+
+#ifndef compiler_uncache_epilog
+#define compiler_uncache_epilog()
+#endif
+
+#ifndef compiler_recache_prolog
+#define compiler_recache_prolog()
+#endif
+
+#ifndef compiler_recache_epilog
+#define compiler_recache_epilog()
+#endif
+\f
/* Fast variable reference mechanism for compiled code.
compiler_cache is the core of the variable caching mechanism.
a fake cache is created and all the assignment references are
updated to point to it.
*/
+\f
+#ifndef PARALLEL_PROCESSOR
+
+#define compiler_cache_consistency_check()
+
+#else /* PARALLEL_PROCESSOR */
+/* The purpose of this code is to avoid a lock gap.
+ A re-definition can take place before the lock is grabbed
+ and we will be caching to the wrong cell.
+ To be paranoid we redo the lookup while we have the
+  cell locked and confirm that we still have the correct cell.
+
+ Note that this lookup can be "shallow" since the result of
+ the previous lookup is saved in my_variable. The "shallow"
+ lookup code takes care of performing a deep lookup if the
+ cell has been "dangerized".
+ */
+
+/* NOTE: this macro expands inside compiler_cache and relies on its
+   locals (cell, name, env, set_serializer) and on its
+   compiler_cache_retry label being in scope at the expansion site. */
+
+#define compiler_cache_consistency_check() \
+{ \
+  Pointer *new_cell; \
+ \
+  compiler_cache_variable[VARIABLE_SYMBOL] = name; \
+  new_cell = lookup_cell(compiler_cache_variable, env); \
+  if (cell != new_cell) \
+  { \
+    remove_lock(set_serializer); \
+    cell = new_cell; \
+    goto compiler_cache_retry; \
+  } \
+}
+
+#endif /* PARALLEL_PROCESSOR */
+
+extern Pointer compiler_cache_variable[];
extern long compiler_cache();
+Pointer compiler_cache_variable[3];
+\f
long
-compiler_cache(cell, name, block, offset, kind)
+compiler_cache(cell, env, name, block, offset, kind, first_time)
fast Pointer *cell;
- Pointer name, block;
+ Pointer env, name, block;
long offset, kind;
+ Boolean first_time;
{
+ long cache_reference_end();
Lock_Handle set_serializer;
fast Pointer trap, references, extension;
Pointer trap_value, store_trap_tag, store_extension;
long trap_kind, return_value;
-
+
store_trap_tag = NIL;
store_extension = NIL;
trap_kind = TRAP_COMPILER_CACHED;
+compiler_cache_retry:
+
setup_lock(set_serializer, cell);
+ compiler_cache_consistency_check();
+ compiler_cache_prolog();
+
trap = *cell;
trap_value = trap;
\f
break;
default:
+ compiler_cache_epilog();
remove_lock(set_serializer);
return (ERR_ILLEGAL_REFERENCE_TRAP);
}
if (GC_allocate_test(MAXIMUM_CACHE_SIZE))
{
+ compiler_cache_epilog();
remove_lock(set_serializer);
Request_GC(MAXIMUM_CACHE_SIZE);
return (PRIM_INTERRUPT);
#if false
/* This is included in the check above. */
- if (GC_allocate_test(7))
+ if (GC_allocate_test(9))
{
+ compiler_cache_epilog();
remove_lock(set_serializer);
- Request_GC(7);
+ Request_GC(9);
return (PRIM_INTERRUPT);
}
#endif
/* It is not really from compiled code.
The environment linking stuff wants a cc cache instead.
*/
+ compiler_cache_epilog();
remove_lock(set_serializer);
return (PRIM_DONE);
}
if (GC_allocate_test(4))
{
+ compiler_cache_epilog();
remove_lock(set_serializer);
Request_GC(4);
return (PRIM_INTERRUPT);
Make_Unsigned_Fixnum(offset));
if (return_value != PRIM_DONE)
{
+ compiler_cache_epilog();
remove_lock(set_serializer);
return (return_value);
}
}
\f
- /* Install an extension or a uuo link in the cc block, and remove
- the lock.
- */
+ /* Install an extension or a uuo link in the cc block. */
+
+ return_value = cache_reference_end(kind, extension, store_extension,
+ block, offset, trap_value);
+
+ /* Unlock and return */
+
+ compiler_cache_epilog();
+ remove_lock(set_serializer);
+ return (return_value);
+}
- return_value = PRIM_DONE;
+long
+cache_reference_end(kind, extension, store_extension,
+ block, offset, value)
+ long kind, offset;
+ Pointer extension, store_extension, block, value;
+{
+ extern void
+ store_variable_cache();
+ extern long
+ make_uuo_link(),
+ make_fake_uuo_link();
switch(kind)
{
case TRAP_REFERENCES_ASSIGNMENT:
if (store_extension != NIL)
{
- extern void store_variable_cache();
-
store_variable_cache(store_extension, block, offset);
- break;
+ return (PRIM_DONE);
}
/* Fall through */
case TRAP_REFERENCES_LOOKUP:
- {
- extern void store_variable_cache();
-
store_variable_cache(extension, block, offset);
- break;
- }
+ return (PRIM_DONE);
case TRAP_REFERENCES_OPERATOR:
{
- extern long make_uuo_link(), make_fake_uuo_link();
-
- if (REFERENCE_TRAP_P(trap_value))
+ if (REFERENCE_TRAP_P(value))
{
- return_value = make_fake_uuo_link(extension, block, offset);
+ return (make_fake_uuo_link(extension, block, offset));
}
else
{
- return_value = make_uuo_link(trap_value, extension, block, offset);
+ return (make_uuo_link(value, extension, block, offset));
}
- break;
}
}
-
- remove_lock(set_serializer);
- return (return_value);
+ /*NOTREACHED*/
}
\f
-/* This procedure invokes cache_reference after finding the top-level
+/* This procedure invokes compiler_cache after finding the top-level
value cell associated with (env, name).
*/
long
-compiler_cache_reference(env, name, block, offset, kind)
+compiler_cache_reference(env, name, block, offset, kind, first_time)
Pointer env, name, block;
long offset, kind;
+ Boolean first_time;
{
Pointer *cell;
- cell = deep_lookup(env, name, fake_variable_object);
+ cell = deep_lookup(env, name, compiler_cache_variable);
if (cell == unbound_trap_object)
{
long message;
return (message);
}
}
- return (compiler_cache(cell, name, block, offset, kind));
+ return (compiler_cache(cell, env, name, block, offset, kind, first_time));
}
-
+\f
/* This procedure updates all the references in the cached reference
list pointed at by slot to hold value. It also eliminates "empty"
pairs (pairs whose weakly held block has vanished).
return (PRIM_DONE);
}
\f
+extern Pointer compiled_block_environment();
+
+static long
+ trap_map_table[] = {
+ TRAP_REFERENCES_LOOKUP,
+ TRAP_REFERENCES_ASSIGNMENT,
+ TRAP_REFERENCES_OPERATOR
+ };
+
+#define TRAP_MAP_TABLE_SIZE (sizeof(trap_map_table) / sizeof(long))
+
+#ifndef DEFINITION_RECACHES_EAGERLY
+
/* compiler_uncache_slot uncaches all references in the list pointed
at by slot, and clears the list. If the references are operator
references, a fake compiled procedure which will recache when
- invoke is created and installed.
+ invoked is created and installed.
*/
long
*Free++ = sym;
*Free++ = block;
*Free++ = offset;
+\f
if (kind == TRAP_REFERENCES_OPERATOR)
{
extern long make_fake_uuo_link();
sym is the name of the variable.
*/
-static long trap_map_table[] =
- { TRAP_REFERENCES_LOOKUP,
- TRAP_REFERENCES_ASSIGNMENT,
- TRAP_REFERENCES_OPERATOR};
-
-extern long compiler_uncache();
-
long
compiler_uncache(value_cell, sym)
Pointer *value_cell, sym;
return (PRIM_DONE);
}
+ compiler_uncache_prolog();
+
extension = Fast_Vector_Ref(val, TRAP_EXTRA);
references = Fast_Vector_Ref(extension, TRAP_EXTENSION_REFERENCES);
update_lock(set_serializer, Nth_Vector_Loc(extension, TRAP_EXTENSION_CELL));
/* Uncache all of the lists. */
- for (i = 0; i < 3; i++)
+ for (i = TRAP_MAP_TABLE_SIZE; --i >= 0; )
{
index = trap_map_table[i];
temp = compiler_uncache_slot(Nth_Vector_Loc(references, index),
if (temp != PRIM_DONE)
{
remove_lock(set_serializer);
+ compiler_uncache_epilog();
return (temp);
}
}
/* Remove the clone extension if there is one. */
Fast_Vector_Set(extension, TRAP_EXTENSION_CLONE, NIL);
+ compiler_uncache_epilog();
remove_lock(set_serializer);
return (PRIM_DONE);
}
+
+#endif /* DEFINITION_RECACHES_EAGERLY */
+\f
+#ifdef DEFINITION_RECACHES_EAGERLY
+
+/*
+ compiler_recache is invoked when a redefinition occurs. It
+ recaches (at the definition point) all the references that need to
+ point to the new cell.
+
+ It does this in two phases:
+
+ - First (by means of compiler_recache_split) it splits all
+ references into those that need to be updated and those that do
+ not. This is done by side-effecting the list so that all those
+ that need updating are at the end, and when we actually decide to
+ go ahead, we can just clip it and install it in the new location.
+ compiler_recache_split also counts how many entries are affected,
+ so the total amount of gc space needed can be computed.
+
+ - After checking that there is enough space to proceed, (rather
+ than aborting) it actually does the recaching. It caches to the
+ new location/value by using compiler_recache_slot. Note that the
+ eventual trap extension has already been allocated so the recached
+ links can point to it.
+ */
+
+/* Required by compiler_uncache macro. */
+
+Pointer *shadowed_value_cell = ((Pointer *) NULL);
+
+/* Each extension is a hunk4. */
+
+#define SPACE_PER_EXTENSION 4
+
+/* Trap, extension, and one cache-list hunk. */
+
+#define SPACE_PER_TRAP (2 + SPACE_PER_EXTENSION + 3)
+
+/* 1 Pair and 1 Weak pair.
+ Not really needed since the pairs and weak pairs are reused.
+ */
+
+#define SPACE_PER_ENTRY (2 + 2)
+
+/* Hopefully a conservative guesstimate. */
+
+#ifndef SPACE_PER_LINK /* So it can be overridden from config.h */
+#define SPACE_PER_LINK 10
+#endif /* SPACE_PER_LINK */
+\f
+/* The spaces are 0 because the pairs are reused! If that ever changes,
+ they should all become SPACE_PER_ENTRY + current value.
+ */
+
+#define SPACE_PER_LOOKUP 0
+#define SPACE_PER_ASSIGNMENT 0
+#define SPACE_PER_OPERATOR (0 + SPACE_PER_LINK)
+
+static long
+ trap_size_table[TRAP_MAP_TABLE_SIZE] = {
+ SPACE_PER_LOOKUP,
+ SPACE_PER_ASSIGNMENT,
+ SPACE_PER_OPERATOR
+ };
+
+static long
+ trap_conflict_table[TRAP_MAP_TABLE_SIZE] = {
+ 0, /* lookup */
+ 1, /* assignment */
+ 1 /* operator */
+ };
+
+/* Returns true iff `ancestor' is `descendant' itself or appears on
+   descendant's chain of parent environments.  The chain is followed
+   through each frame's procedure (ENVIRONMENT_FUNCTION slot) to that
+   procedure's closing environment (PROCEDURE_ENVIRONMENT slot), until
+   the global environment is reached; the global frame itself is then
+   compared as the last candidate.  */
+
+Boolean
+environment_ancestor_or_self_p(ancestor, descendant)
+ fast Pointer ancestor, descendant;
+{
+ while (OBJECT_TYPE(descendant) != GLOBAL_ENV)
+ {
+ if (descendant == ancestor)
+ return (true);
+ descendant = Fast_Vector_Ref(Vector_Ref(descendant,
+ ENVIRONMENT_FUNCTION),
+ PROCEDURE_ENVIRONMENT);
+ }
+ return (descendant == ancestor);
+}
+\f
+/* This reorders the entries in slot so that the entries that are
+ not affected by the redefinition appear first, and the affected
+ ones appear last. A pointer to the first affected cell is stored
+ in memoize_cell, and this will be given to compiler_recache_slot
+ in order to avoid recomputing the division.
+
+ Note: There is an implicit assumption throughout that none of the
+ pairs (or weak pairs) are in pure space. If they are, they cannot
+ be sorted or reused.
+ */
+
+long
+compiler_recache_split(slot, sym, definition_env, memoize_cell)
+ fast Pointer *slot;
+ Pointer sym, definition_env, **memoize_cell;
+{
+ fast long count;
+ Pointer weak_pair, block, reference_env, invalid_head;
+ fast Pointer *last_invalid;
+
+ /* count = number of references that must be recached;
+    invalid_head/last_invalid accumulate those entries as a
+    separate chain, spliced back at the tail at the end.  */
+ count = 0;
+ last_invalid = &invalid_head;
+
+ while (*slot != NIL)
+ {
+ weak_pair = Fast_Vector_Ref(*slot, CONS_CAR);
+ block = Fast_Vector_Ref(weak_pair, CONS_CAR);
+ if (block == NIL)
+ {
+ /* The weakly-held code block has been GC'd away;
+    drop this entry entirely.  */
+ *slot = Fast_Vector_Ref(*slot, CONS_CDR);
+ continue;
+ }
+ reference_env = compiled_block_environment(block);
+ if (!environment_ancestor_or_self_p(definition_env, reference_env))
+ {
+ /* Unaffected by the redefinition: leave in place, advance.  */
+ slot = Nth_Vector_Loc(*slot, CONS_CDR);
+ }
+ else
+ {
+ /* Affected: unlink from the main chain and append to the
+    invalid chain (pairs assumed not in pure space, so they
+    may be destructively relinked).  */
+ count += 1;
+ *last_invalid = *slot;
+ last_invalid = Nth_Vector_Loc(*slot, CONS_CDR);
+ *slot = *last_invalid;
+ }
+ }
+ /* Terminate the invalid chain, remember where it starts
+    (for compiler_recache_slot), and hang it off the tail.  */
+ *last_invalid = NIL;
+ *memoize_cell = slot;
+ *slot = invalid_head;
+ return (count);
+}
+\f
+/* This recaches the entries pointed out by cell and adds them
+ to the list in slot. It also sets to NIL the contents
+ of cell.
+
+ Note that this reuses the pairs and weak pairs that used to be
+ in cell.
+ */
+
+long
+compiler_recache_slot(extension, sym, kind, slot, cell, value)
+ Pointer extension, sym, value;
+ fast Pointer *slot, *cell;
+ long kind;
+{
+ fast Pointer pair, weak_pair;
+ Pointer clone, tail;
+ long result;
+
+ /* This is NIL if there isn't one.
+ This makes cache_reference_end do the right thing.
+ */
+ clone = Fast_Vector_Ref(extension, TRAP_EXTENSION_CLONE);
+ tail = *slot;
+
+ /* NIL, not NULL: these cache lists are Scheme lists terminated
+    with NIL, as traversed in compiler_recache_split above.  */
+ for (pair = *cell; pair != NIL; pair = *cell)
+ {
+ weak_pair = Fast_Vector_Ref(pair, CONS_CAR);
+ result = cache_reference_end(kind, extension, clone,
+ Fast_Vector_Ref(weak_pair, CONS_CAR),
+ Get_Integer(Fast_Vector_Ref(weak_pair,
+ CONS_CDR)),
+ value);
+ if (result != PRIM_DONE)
+ {
+ /* We are severely screwed.
+ compiler_recache will do the appropriate thing.
+ */
+ *slot = tail;
+ return (result);
+ }
+
+ /* Reuse the pair: move it from cell's list onto slot's list.  */
+ *slot = pair;
+ slot = Nth_Vector_Loc(pair, CONS_CDR);
+ *cell = *slot;
+ }
+ *slot = tail;
+ return (PRIM_DONE);
+}
+\f
+long
+compiler_recache(old_value_cell, new_value_cell, env, sym, value,
+ shadowed_p, link_p)
+ Pointer *old_value_cell, *new_value_cell, env, sym, value;
+ Boolean shadowed_p, link_p;
+{
+ Lock_Handle set_serializer_1, set_serializer_2;
+ Pointer
+ old_value, references, extension, new_extension, new_trap,
+ *trap_info_table[TRAP_MAP_TABLE_SIZE];
+ long
+ trap_kind, temp, i, index, total_size, total_count, conflict_count;
+
+ setup_locks(set_serializer_1, old_value_cell,
+ set_serializer_2, new_value_cell);
+
+ if ((!link_p) && (*new_value_cell != DANGEROUS_UNBOUND_OBJECT))
+ {
+ /* Another processor has redefined this word in the meantime.
+ The other processor must have recached all the compiled code
+ caches since it is shadowing the same variable.
+ The definition has become a redefinition.
+ */
+ remove_locks(set_serializer_1, set_serializer_2);
+ return (redefinition(new_value_cell, value));
+ }
+
+ old_value = *old_value_cell;
+
+ if (!(REFERENCE_TRAP_P(old_value)))
+ {
+ remove_locks(set_serializer_1, set_serializer_2);
+ return (link_p ?
+ PRIM_DONE :
+ definition(new_value_cell, value, shadowed_p));
+ }
+
+ get_trap_kind(trap_kind, old_value);
+ if ((trap_kind != TRAP_COMPILER_CACHED) &&
+ (trap_kind != TRAP_COMPILER_CACHED_DANGEROUS))
+ {
+ remove_locks(set_serializer_1, set_serializer_2);
+ return (link_p ?
+ PRIM_DONE :
+ definition(new_value_cell, value, shadowed_p));
+ }
+
+ compiler_recache_prolog();
+
+ extension = Fast_Vector_Ref(old_value, TRAP_EXTRA);
+ references = Fast_Vector_Ref(extension, TRAP_EXTENSION_REFERENCES);
+ update_lock(set_serializer_1,
+ Nth_Vector_Loc(extension, TRAP_EXTENSION_CELL));
+\f
+ /*
+ Split each slot and compute the amount to allocate.
+ */
+
+ conflict_count = 0;
+ total_size = (link_p ? 0 : SPACE_PER_TRAP);
+ total_count = 0;
+
+ for (i = TRAP_MAP_TABLE_SIZE; --i >= 0; )
+ {
+ index = trap_map_table[i];
+ temp = compiler_recache_split(Nth_Vector_Loc(references, index),
+ sym, env, &trap_info_table[i]);
+
+ if (temp != 0)
+ {
+ conflict_count += trap_conflict_table[i];
+ total_size += (temp * trap_size_table[i]);
+ total_count += temp;
+ }
+ }
+
+ if (total_count == 0)
+ {
+ compiler_recache_epilog();
+ remove_locks(set_serializer_1, set_serializer_2);
+ return (link_p ?
+ PRIM_DONE :
+ definition(new_value_cell, value, shadowed_p));
+ }
+
+ if ((conflict_count == 2) &&
+ ((!link_p) ||
+ (new_value_cell[TRAP_EXTENSION_CLONE] == NIL)))
+ {
+ total_size += SPACE_PER_EXTENSION;
+ }
+
+ if (GC_allocate_test(total_size))
+ {
+ /* Unfortunate fact of life: This binding will be dangerous
+ even if there is no need, but this is the only way to
+ guarantee consistent values.
+ */
+ compiler_recache_epilog();
+ remove_locks(set_serializer_1, set_serializer_2);
+ Request_GC(total_size);
+ return (PRIM_INTERRUPT);
+ }
+\f
+ /*
+ Allocate and initialize all the cache structures if necessary.
+ */
+
+ if (link_p)
+ {
+ new_extension = Make_Pointer(TRAP_EXTENSION_TYPE, new_value_cell);
+ references = new_value_cell[TRAP_EXTENSION_REFERENCES];
+ }
+ else
+ {
+ /* The reference trap is created here, but is not installed in the
+ environment structure until the end. The new binding contains
+ a DANGEROUS_UNBOUND_OBJECT so that other parallel lookups will
+ skip this binding.
+ */
+
+ references = Make_Pointer(TRAP_REFERENCES_TYPE, Free);
+
+ *Free++ = NIL;
+ *Free++ = NIL;
+ *Free++ = NIL;
+
+ new_extension = Make_Pointer(TRAP_EXTENSION_TYPE, Free);
+
+ *Free++ = value;
+ *Free++ = sym;
+ *Free++ = NIL;
+ *Free++ = references;
+
+ new_trap = Make_Pointer(TC_REFERENCE_TRAP, Free);
+ *Free++ = Make_Unsigned_Fixnum((shadowed_p ?
+ TRAP_COMPILER_CACHED_DANGEROUS :
+ TRAP_COMPILER_CACHED));
+ *Free++ = new_extension;
+ }
+
+ if ((conflict_count == 2) &&
+ (Vector_Ref(new_extension, TRAP_EXTENSION_CLONE) == NIL))
+ {
+ Pointer clone;
+
+ clone = Make_Pointer(TRAP_EXTENSION_TYPE, Free);
+
+ *Free++ = EXPENSIVE_ASSIGNMENT_OBJECT;
+ *Free++ = sym;
+ *Free++ = new_extension;
+ *Free++ = references;
+ Fast_Vector_Set(new_extension, TRAP_EXTENSION_CLONE, clone);
+ }
+\f
+ /*
+ Now we actually perform the recaching, allocating freely.
+ */
+
+ for (i = TRAP_MAP_TABLE_SIZE; --i >= 0; )
+ {
+ index = trap_map_table[i];
+ temp = compiler_recache_slot(new_extension, sym, index,
+ Nth_Vector_Loc(references, index),
+ trap_info_table[i],
+ value);
+ if (temp != PRIM_DONE)
+ {
+ extern char *Abort_Names[], *Error_Names[];
+
+ /* We've lost BIG. */
+
+ if (temp == PRIM_INTERRUPT)
+ fprintf(stderr,
+ "\ncompiler_recache: Ran out of guaranteed space!\n");
+ else if (temp > 0)
+ /* Positive values are error codes: index Error_Names,
+    not Abort_Names (which is for the negative case below). */
+ fprintf(stderr,
+ "\ncompiler_recache: Unexpected error value %d (%s)\n",
+ temp, Error_Names[temp]);
+ else
+ fprintf(stderr,
+ "\ncompiler_recache: Unexpected abort value %d (%s)\n",
+ -temp, Abort_Names[(-temp) - 1]);
+ Microcode_Termination(TERM_EXIT);
+ }
+ }
+
+ if (!link_p)
+ {
+ *new_value_cell = new_trap;
+ }
+ compiler_recache_epilog();
+ remove_locks(set_serializer_1, set_serializer_2);
+ return (PRIM_DONE);
+}
+
+#endif /* DEFINITION_RECACHES_EAGERLY */
\f
/* recache_uuo_links is invoked when an assignment occurs to a
variable which has cached operator references (uuo links).
fast Pointer *slot;
long return_value;
+ update_uuo_prolog();
references = Fast_Vector_Ref(extension, TRAP_EXTENSION_REFERENCES);
slot = Nth_Vector_Loc(references, TRAP_REFERENCES_OPERATOR);
Get_Integer(Fast_Vector_Ref(pair, CONS_CDR)));
if (return_value != PRIM_DONE)
{
+ update_uuo_epilog();
return (return_value);
}
slot = Nth_Vector_Loc(*slot, CONS_CDR);
fix_references(Nth_Vector_Loc(references, TRAP_REFERENCES_ASSIGNMENT),
extension);
}
+ update_uuo_epilog();
return (PRIM_DONE);
}
\f
Otherwise the reference is done normally, and the process continued.
*/
-extern Pointer compiled_block_environment();
-
long
compiler_reference_trap(extension, kind, handler)
Pointer extension;
long offset, temp;
Pointer block;
+try_again:
+
if (Vector_Ref(extension, TRAP_EXTENSION_CELL) != REQUEST_RECACHE_OBJECT)
{
return ((*handler)(Nth_Vector_Loc(extension, TRAP_EXTENSION_CELL),
block = Fast_Vector_Ref(extension, TRAP_EXTENSION_BLOCK);
offset = Get_Integer(Fast_Vector_Ref(extension, TRAP_EXTENSION_OFFSET));
+
+ compiler_trap_prolog();
temp =
compiler_cache_reference(compiled_block_environment(block),
Fast_Vector_Ref(extension, TRAP_EXTENSION_NAME),
- block,
- offset,
- kind);
+ block, offset, kind, false);
+ compiler_trap_epilog();
if (temp != PRIM_DONE)
{
return (temp);
{
case TRAP_REFERENCES_OPERATOR:
{
+
/* Note that this value may cause another operator trap when
invoked, since it may be a uuo-link to an interpreted
- procedure, or to a variable with a trap in it. It should not
- go into a loop however, because the reference will be cached
- to the correct place, so the extension will no longer have a
- REQUEST_RECACHE_OBJECT in it. The first branch in this
- procedure will be taken in this case.
+ procedure, or to a variable with a trap in it. However, it
+ should not go into a loop because the reference should be
+ cached to the correct place, so the extension will no longer
+ have a REQUEST_RECACHE_OBJECT in it. The first branch in
+ this procedure will be taken in this case. On a
+ multiprocessor it may in fact loop if some other processor
+ redefines the variable before we have a chance to invoke the
+ value.
*/
extern Pointer extract_uuo_link();
default:
{
extern Pointer extract_variable_cache();
- Pointer extension;
extension = extract_variable_cache(block, offset);
- return ((*handler)(Nth_Vector_Loc(extension, TRAP_EXTENSION_CELL),
- fake_variable_object));
+ /* This is paranoid on a single processor, but it does not hurt.
+ On a multiprocessor, we need to do it because some other processor
+ may have redefined this variable in the meantime.
+ */
+ goto try_again;
}
}
}
{
return (compiler_cache_reference(compiled_block_environment(block),
name, block, offset,
- TRAP_REFERENCES_LOOKUP));
+ TRAP_REFERENCES_LOOKUP, true));
}
long
{
return (compiler_cache_reference(compiled_block_environment(block),
name, block, offset,
- TRAP_REFERENCES_ASSIGNMENT));
+ TRAP_REFERENCES_ASSIGNMENT, true));
}
long
{
return (compiler_cache_reference(compiled_block_environment(block),
name, block, offset,
- TRAP_REFERENCES_OPERATOR));
+ TRAP_REFERENCES_OPERATOR, true));
}
\f
extern long complr_operator_reference_trap();
promotional, or sales literature without prior written consent from
MIT in each case. */
-/* $Header: /Users/cph/tmp/foo/mit-scheme/mit-scheme/v7/src/microcode/lookup.h,v 9.40 1988/05/03 19:21:57 jinx Exp $ */
+/* $Header: /Users/cph/tmp/foo/mit-scheme/mit-scheme/v7/src/microcode/lookup.h,v 9.41 1988/09/29 05:02:21 jinx Exp $ */
/* Macros and declarations for the variable lookup code. */
#define setup_lock(handle, cell) handle = Lock_Cell(cell)
#define remove_lock(handle) Unlock_Cell(handle)
+\f
+/* This should prevent a deadly embrace if whole contiguous
+ regions are locked, rather than individual words.
+ */
-#else
+#define setup_locks(hand1, cel1, hand2, cel2) \
+{ \
+ if (LOCK_FIRST(cel1, cel2)) \
+ { \
+ setup_lock(hand1, cel1); \
+ setup_lock(hand2, cel2); \
+ } \
+ else \
+ { \
+ setup_lock(hand2, cel2); \
+ setup_lock(hand1, cel1); \
+ } \
+}
+
+#define remove_locks(hand1, hand2) \
+{ \
+ remove_lock(hand2); \
+ remove_lock(hand1); \
+}
+
+#else /* not PARALLEL_PROCESSOR */
#define verify(type_code, variable, code, label)
#define verified_offset(variable, code) code
#define setup_lock(handle, cell)
#define remove_lock(ignore)
+#define setup_locks(hand1, cel1, hand2, cel2)
+#define remove_locks(ign1, ign2)
-#endif
+#endif /* PARALLEL_PROCESSOR */
/* This is provided as a separate macro so that it can be made
atomic if necessary.
cell = Nth_Vector_Loc(frame, CONS_CDR); \
break; \
}
+\f
+/* Macros and exports for incremental definition and hooks. */
+
+extern long extend_frame();
+
+#ifndef DEFINITION_RECACHES_EAGERLY
+
+extern long compiler_uncache();
+
+#define simple_uncache(cell, sym) PRIM_DONE
+
+#define shadowing_recache(cell, env, sym, value, shadowed_p) \
+ definition(cell, value, shadowed_p)
+
+#define compiler_recache(old, new, env, sym, val, shadowed_p, link_p) \
+ PRIM_DONE
+
+#else /* DEFINITION_RECACHES_EAGERLY */
+
+extern long compiler_recache();
+
+extern Pointer *shadowed_value_cell;
+
+#define compiler_uncache(cell, sym) \
+ (shadowed_value_cell = cell, PRIM_DONE)
+
+#define simple_uncache(cell, sym) \
+ compiler_uncache(cell, sym)
+
+#define shadowing_recache(cell, env, sym, value, shadowed_p) \
+ compiler_recache(shadowed_value_cell, cell, env, sym, value, \
+ shadowed_p, false)
+
+#endif /* DEFINITION_RECACHES_EAGERLY */
promotional, or sales literature without prior written consent from
MIT in each case. */
-/* $Header: /Users/cph/tmp/foo/mit-scheme/mit-scheme/v7/src/microcode/utils.c,v 9.38 1988/08/15 20:57:46 cph Exp $ */
+/* $Header: /Users/cph/tmp/foo/mit-scheme/mit-scheme/v7/src/microcode/utils.c,v 9.39 1988/09/29 05:03:12 jinx Exp $ */
/* This file contains utilities for interrupts, errors, etc. */
PRIMITIVE_ABORT(PRIM_POP_RETURN);
/*NOTREACHED*/
}
+\f
+extern Pointer Compiler_Get_Fixed_Objects();
+
+Pointer
+Compiler_Get_Fixed_Objects()
+{
+ if (Valid_Fixed_Obj_Vector())
+ return (Get_Fixed_Obj_Slot(Me_Myself));
+ else
+ return (NIL);
+}
promotional, or sales literature without prior written consent from
MIT in each case. */
-/* $Header: /Users/cph/tmp/foo/mit-scheme/mit-scheme/v8/src/microcode/interp.c,v 9.45 1988/08/15 20:50:06 cph Exp $
+/* $Header: /Users/cph/tmp/foo/mit-scheme/mit-scheme/v8/src/microcode/interp.c,v 9.46 1988/09/29 04:58:42 jinx Exp $
*
* This file contains the heart of the Scheme Scode
* interpreter
break;
case RC_NORMAL_GC_DONE:
- End_GC_Hook();
+ Val = Fetch_Expression();
if (GC_Space_Needed < 0)
{
/* Paranoia */
Microcode_Termination(TERM_GC_OUT_OF_SPACE);
}
GC_Space_Needed = 0;
- Val = Fetch_Expression();
+ End_GC_Hook();
break;
\f
case RC_PCOMB1_APPLY:
promotional, or sales literature without prior written consent from
MIT in each case. */
-/* $Header: /Users/cph/tmp/foo/mit-scheme/mit-scheme/v8/src/microcode/lookup.c,v 9.40 1988/08/15 20:51:32 cph Exp $
+/* $Header: /Users/cph/tmp/foo/mit-scheme/mit-scheme/v8/src/microcode/lookup.c,v 9.41 1988/09/29 04:59:45 jinx Rel $
*
* This file contains symbol lookup and modification routines. See
* Hal Abelson for a paper describing and justifying the algorithm.
return (Nth_Vector_Loc(sym, SYMBOL_GLOBAL_VALUE));
}
\f
+/* Shallow lookup performed "out of line" by various procedures.
+ It takes care of invoking deep_lookup when necessary.
+ */
+
+extern Pointer *lookup_cell();
+
+Pointer *
+lookup_cell(hunk, env)
+ Pointer *hunk, env;
+{
+ Pointer *cell, value;
+ long trap_kind;
+
+ /* Shallow lookup via the lookup macro; repeat_lookup_cell is the
+    retry label the macro uses internally — presumably defined by
+    its expansion (TODO: confirm against the macro in lookup.h).  */
+ lookup(cell, env, hunk, repeat_lookup_cell);
+
+ value = Fetch(cell[0]);
+
+ /* Non-trap contents: the shallow result is authoritative.  */
+ if (OBJECT_TYPE(value) != TC_REFERENCE_TRAP)
+ {
+ return (cell);
+ }
+
+ get_trap_kind(trap_kind, value);
+ switch(trap_kind)
+ {
+ /* Dangerous traps mean the binding may be shadowed: redo the
+    lookup the slow, deep way from the environment.  */
+ case TRAP_DANGEROUS:
+ case TRAP_UNBOUND_DANGEROUS:
+ case TRAP_UNASSIGNED_DANGEROUS:
+ case TRAP_FLUID_DANGEROUS:
+ case TRAP_COMPILER_CACHED_DANGEROUS:
+ return (deep_lookup(env, hunk[VARIABLE_SYMBOL], hunk));
+
+ /* Benign traps: the cell found shallowly is still the right one.  */
+ case TRAP_COMPILER_CACHED:
+ case TRAP_FLUID:
+ case TRAP_UNBOUND:
+ case TRAP_UNASSIGNED:
+ return (cell);
+
+ /* Unknown trap kind: return the sentinel cell so callers can
+    detect the illegal trap.  */
+ default:
+ return (illegal_trap_object);
+ }
+}
+\f
/* Full lookup end code.
deep_lookup_end handles all the complicated and dangerous cases.
cell is the value cell (supposedly found by deep_lookup). Hunk is
case TRAP_UNASSIGNED_DANGEROUS:
return_value = ERR_UNASSIGNED_VARIABLE;
break;
-
+\f
case TRAP_DANGEROUS:
{
Pointer trap_value;
}
\f
/* Complete assignment finalization.
+
deep_assignment_end handles all dangerous cases, and busts compiled
code operator reference caches as appropriate. It is similar to
deep_lookup_end.
value is the new value for the variable.
force forces an assignment if the variable is unbound. This is
- used for redefinition in the global environment, and for Common
- Lisp style fluid binding, which creates a value cell if there was
- none.
+ used for redefinition in the global environment.
Notes on multiprocessor locking:
affect an operation must acquire the same locks and in the same
order, thus if there is no interleaving of these operations, the
result will be correct.
+
+ Important:
+
+ A re-definition can take place before the lock is grabbed in this
+ code and we will be clobbering the wrong cell. To be paranoid we
+ should redo the lookup while we have the cell locked and confirm
+ that this is still valid, but this is hard to do here.
+ Alternatively the lock could be grabbed by the caller and passed as
+ an argument after confirming the correctness of the binding. A
+ third option (the one in place now) is not to worry about this,
+ saying that there is a race condition in the user code and that the
+ definition happened after this assignment. For more precise
+ sequencing, the user should synchronize her/his assignments and
+ definitions her/himself.
+
+ assignment_end suffers from this problem as well.
+
*/
\f
#define RESULT(value) \
update_lock(set_serializer,
Nth_Vector_Loc(saved_extension, TRAP_EXTENSION_CELL));
}
- return_value = recache_uuo_links(saved_extension, saved_value);
+ /* NOTE:
+ recache_uuo_links can take an arbitrary amount of time since
+ there may be an internal lock and the code may have to uncache
+ arbitrarily many links.
+ Deadlock should not occur since both locks are always acquired
+ in the same order.
+ */
+
+ return_value = recache_uuo_links(saved_extension, saved_value);
remove_lock(set_serializer);
if (return_value != PRIM_DONE)
/* This must be done after the assignment lock has been removed,
to avoid potential deadlock.
*/
+
if (uncompile_p)
{
/* The reference was dangerous, uncompile the variable. */
return (return_value);
}
-
+\f
#undef ABORT
#undef REDO
#undef RESULT
#undef UNCOMPILE
-\f
+
/* Simple assignment end.
assignment_end lets deep_assignment_end handle all the hairy cases.
It is similar to lookup_end, but there is some hair for
#define redefinition(cell, value) \
deep_assignment_end(cell, fake_variable_object, value, true)
+/* Installs value in a binding cell created by a definition.
+   If the binding shadows an outer one (shadowed_p), this is
+   treated as a full redefinition.  Otherwise the cell is only
+   stored into directly while it still holds the
+   DANGEROUS_UNBOUND_OBJECT placeholder (checked under the cell
+   lock); if some other operation has touched it in the meantime,
+   fall back to redefinition to keep values consistent.  */
+long
+definition(cell, value, shadowed_p)
+ Pointer *cell, value;
+ Boolean shadowed_p;
+{
+ if (shadowed_p)
+ return (redefinition(cell, value));
+ else
+ {
+ Lock_Handle set_serializer;
+
+ setup_lock(set_serializer, cell);
+ if (*cell == DANGEROUS_UNBOUND_OBJECT)
+ {
+ *cell = value;
+ remove_lock(set_serializer);
+ return (PRIM_DONE);
+ }
+ else
+ {
+ /* Unfortunate fact of life: This binding will be dangerous
+ even if there was no need, but this is the only way to
+ guarantee consistent values.
+ */
+ remove_lock(set_serializer);
+ return (redefinition(cell, value));
+ }
+ }
+}
+\f
long
dangerize(cell, sym)
fast Pointer *cell;
*Free++ = *cell;
*cell = trap;
remove_lock(set_serializer);
- return (PRIM_DONE);
+ return (simple_uncache(cell, sym));
}
\f
get_trap_kind(temp, *cell);
case TRAP_UNASSIGNED_DANGEROUS:
case TRAP_DANGEROUS:
case TRAP_FLUID_DANGEROUS:
- temp = PRIM_DONE;
break;
case TRAP_COMPILER_CACHED:
case TRAP_COMPILER_CACHED_DANGEROUS:
{
- long compiler_uncache();
-
remove_lock(set_serializer);
return (compiler_uncache(cell, sym));
}
Do_Store_No_Lock
((Nth_Vector_Loc (*cell, TRAP_TAG)),
(Make_Unsigned_Fixnum (TRAP_FLUID_DANGEROUS)));
- temp = PRIM_DONE;
break;
case TRAP_UNBOUND:
*cell = DANGEROUS_UNBOUND_OBJECT;
- temp = PRIM_DONE;
break;
case TRAP_UNASSIGNED:
*cell = DANGEROUS_UNASSIGNED_OBJECT;
- temp = PRIM_DONE;
break;
default:
- temp = ERR_ILLEGAL_REFERENCE_TRAP;
- break;
+ remove_lock(set_serializer);
+ return (ERR_ILLEGAL_REFERENCE_TRAP);
}
remove_lock(set_serializer);
- return (temp);
+ return (simple_uncache(cell, sym));
}
\f
/* The core of the incremental definition mechanism.
+
It takes care of dangerizing any bindings being shadowed by this
- definition, extending the frames appropriately, and uncaching any
+ definition, extending the frames appropriately, and uncaching or
+ recaching (according to the DEFINITION_RECACHES_EAGERLY flag) any
compiled code reference caches which might be affected by the new
definition.
*/
long
-extend_frame(env, sym, value, original_frame_p)
- Pointer env, sym, value;
- Boolean original_frame_p;
+extend_frame(env, sym, value, original_frame, recache_p)
+ Pointer env, sym, value, original_frame;
+ Boolean recache_p;
{
Lock_Handle extension_serializer;
Pointer extension, the_procedure;
*/
if (OBJECT_DATUM(env) != GO_TO_GLOBAL)
{
- return (original_frame_p ? ERR_BAD_FRAME : PRIM_DONE);
+ return ((env == original_frame) ? ERR_BAD_FRAME : PRIM_DONE);
}
- else if (original_frame_p)
+ else if (env == original_frame)
{
return (redefinition(Nth_Vector_Loc(sym, SYMBOL_GLOBAL_VALUE),
value));
long offset;
offset = 1 + Vector_Length(formals) - count;
- if (original_frame_p)
+ if (env == original_frame)
{
return (redefinition(Nth_Vector_Loc(env, offset), value));
}
*/
if (Fetch(scan[0]) == DANGEROUS_UNBOUND_OBJECT)
{
- long compiler_uncache();
long temp;
temp =
sym,
fake_variable_object),
sym);
- if (temp != PRIM_DONE)
+
+ if ((temp != PRIM_DONE) || (env != original_frame))
{
return (temp);
}
+ return shadowing_recache(scan, env, sym, value, true);
}
- if (original_frame_p)
+ if (env == original_frame)
{
return (redefinition(scan, value));
}
temp =
extend_frame(Fast_Vector_Ref(extension, ENV_EXTENSION_PARENT_FRAME),
- sym, NIL, false);
+ sym, NIL, original_frame, recache_p);
if (temp != PRIM_DONE)
{
return (temp);
}
-\f
+
/* Proceed to extend the frame:
- If the frame is the one where the definition is occurring,
put the value in the new value cell.
remove_lock(extension_serializer);
goto redo_aux_lookup;
}
-
+\f
scan = Get_Pointer(extension);
if ((temp + (AUX_LIST_FIRST - 1)) == Get_Integer(scan[VECTOR_LENGTH]))
result = Make_Pointer(TC_LIST, Free);
*Free++ = sym;
- *Free++ = ((original_frame_p) ? value : DANGEROUS_UNBOUND_OBJECT);
+ *Free++ = DANGEROUS_UNBOUND_OBJECT;
scan[temp + AUX_LIST_FIRST] = result;
scan[AUX_LIST_COUNT] = Make_Local_Offset(temp + 1);
+
+ remove_lock(extension_serializer);
+
+ if ((env != original_frame) || (!recache_p))
+ return (PRIM_DONE);
+ else
+ return (shadowing_recache((Free - 1), env, sym, value, false));
}
- remove_lock(extension_serializer);
- return (PRIM_DONE);
}
}
\f
"\n;; Local_Set: defining %s.",
Scheme_String_To_C_String(Vector_Ref(sym, SYMBOL_NAME)));
}
- result = extend_frame(env, sym, value, true);
+ result = extend_frame(env, sym, value, env, true);
Val = sym;
return (result);
}
deep_lookup(previous, symbol, fake_variable_object);
}
\f
+/* Macros to allow multiprocessor interlocking in
+ compiler caching and recaching.
+
+ The defaults are NOPs, but can be overridden by machine dependent
+ include files or config.h
+ */
+
+#ifndef update_uuo_prolog
+#define update_uuo_prolog()
+#endif
+
+#ifndef update_uuo_epilog
+#define update_uuo_epilog()
+#endif
+
+#ifndef compiler_cache_prolog
+#define compiler_cache_prolog()
+#endif
+
+#ifndef compiler_cache_epilog
+#define compiler_cache_epilog()
+#endif
+
+#ifndef compiler_trap_prolog
+#define compiler_trap_prolog()
+#endif
+
+#ifndef compiler_trap_epilog
+#define compiler_trap_epilog()
+#endif
+
+#ifndef compiler_uncache_prolog
+#define compiler_uncache_prolog()
+#endif
+
+#ifndef compiler_uncache_epilog
+#define compiler_uncache_epilog()
+#endif
+
+#ifndef compiler_recache_prolog
+#define compiler_recache_prolog()
+#endif
+
+#ifndef compiler_recache_epilog
+#define compiler_recache_epilog()
+#endif
+\f
/* Fast variable reference mechanism for compiled code.
compiler_cache is the core of the variable caching mechanism.
a fake cache is created and all the assignment references are
updated to point to it.
*/
+\f
+#ifndef PARALLEL_PROCESSOR
+
+#define compiler_cache_consistency_check()
+
+#else /* PARALLEL_PROCESSOR */
+/* The purpose of this code is to avoid a lock gap.
+ A re-definition can take place before the lock is grabbed
+ and we will be caching to the wrong cell.
+ To be paranoid we redo the lookup while we have the
+ cell locked and confirm that we still have the correct cell.
+
+ Note that this lookup can be "shallow" since the result of
+ the previous lookup is saved in my_variable. The "shallow"
+ lookup code takes care of performing a deep lookup if the
+ cell has been "dangerized".
+ */
+
+#define compiler_cache_consistency_check() \
+{ \
+ Pointer *new_cell; \
+ \
+ compiler_cache_variable[VARIABLE_SYMBOL] = name; \
+ new_cell = lookup_cell(compiler_cache_variable, env); \
+ if (cell != new_cell) \
+ { \
+ remove_lock(set_serializer); \
+ cell = new_cell; \
+ goto compiler_cache_retry; \
+ } \
+}
+
+#endif /* PARALLEL_PROCESSOR */
+
+extern Pointer compiler_cache_variable[];
extern long compiler_cache();
+Pointer compiler_cache_variable[3];
+\f
+/* compiler_cache: cache a compiled-code reference of the given kind
+   (lookup, assignment, or operator) to the variable whose value cell
+   is `cell'.  The cell is locked for the duration; on a parallel
+   processor the lookup is redone under the lock and retried (via
+   compiler_cache_retry) if the cell has moved.  Returns PRIM_DONE,
+   PRIM_INTERRUPT when GC space is needed, or an error code.
+ */
long
-compiler_cache(cell, name, block, offset, kind)
+compiler_cache(cell, env, name, block, offset, kind, first_time)
 fast Pointer *cell;
- Pointer name, block;
+ Pointer env, name, block;
 long offset, kind;
+ Boolean first_time;
{
+ long cache_reference_end();
 Lock_Handle set_serializer;
 fast Pointer trap, references, extension;
 Pointer trap_value, store_trap_tag, store_extension;
 long trap_kind, return_value;
-
+
 store_trap_tag = NIL;
 store_extension = NIL;
 trap_kind = TRAP_COMPILER_CACHED;
+compiler_cache_retry:
+
 setup_lock(set_serializer, cell);
+ compiler_cache_consistency_check();
+ compiler_cache_prolog();
+
 trap = *cell;
 trap_value = trap;
\f
 break;
 default:
+ compiler_cache_epilog();
 remove_lock(set_serializer);
 return (ERR_ILLEGAL_REFERENCE_TRAP);
 }
 if (GC_allocate_test(MAXIMUM_CACHE_SIZE))
 {
+ compiler_cache_epilog();
 remove_lock(set_serializer);
 Request_GC(MAXIMUM_CACHE_SIZE);
 return (PRIM_INTERRUPT);
#if false
 /* This is included in the check above. */
- if (GC_allocate_test(7))
+ if (GC_allocate_test(9))
 {
+ compiler_cache_epilog();
 remove_lock(set_serializer);
- Request_GC(7);
+ Request_GC(9);
 return (PRIM_INTERRUPT);
 }
#endif
 /* It is not really from compiled code.
 The environment linking stuff wants a cc cache instead.
 */
+ compiler_cache_epilog();
 remove_lock(set_serializer);
 return (PRIM_DONE);
 }
 if (GC_allocate_test(4))
 {
+ compiler_cache_epilog();
 remove_lock(set_serializer);
 Request_GC(4);
 return (PRIM_INTERRUPT);
 Make_Unsigned_Fixnum(offset));
 if (return_value != PRIM_DONE)
 {
+ compiler_cache_epilog();
 remove_lock(set_serializer);
 return (return_value);
 }
 }
\f
- /* Install an extension or a uuo link in the cc block, and remove
- the lock.
- */
+ /* Install an extension or a uuo link in the cc block. */
+
+ return_value = cache_reference_end(kind, extension, store_extension,
+ block, offset, trap_value);
+
+ /* Unlock and return */
+
+ compiler_cache_epilog();
+ remove_lock(set_serializer);
+ return (return_value);
+}
- return_value = PRIM_DONE;
+/* cache_reference_end: install the cache object in the compiled-code
+   block.  Lookups and assignments get a variable cache installed at
+   (block, offset); for an assignment the clone (store_extension) is
+   preferred when it is non-NIL, otherwise the code falls through to
+   the plain extension.  Operator references get a uuo link -- a fake
+   one when the value is still a reference trap.  Returns PRIM_DONE or
+   the result of make_uuo_link/make_fake_uuo_link.  kind must be one
+   of the three TRAP_REFERENCES_ values; anything else falls off the
+   end (see NOTREACHED below).
+ */
+long
+cache_reference_end(kind, extension, store_extension,
+ block, offset, value)
+ long kind, offset;
+ Pointer extension, store_extension, block, value;
+{
+ extern void
+ store_variable_cache();
+ extern long
+ make_uuo_link(),
+ make_fake_uuo_link();
 switch(kind)
 {
 case TRAP_REFERENCES_ASSIGNMENT:
 if (store_extension != NIL)
 {
- extern void store_variable_cache();
-
 store_variable_cache(store_extension, block, offset);
- break;
+ return (PRIM_DONE);
 }
 /* Fall through */
 case TRAP_REFERENCES_LOOKUP:
- {
- extern void store_variable_cache();
-
 store_variable_cache(extension, block, offset);
- break;
- }
+ return (PRIM_DONE);
 case TRAP_REFERENCES_OPERATOR:
 {
- extern long make_uuo_link(), make_fake_uuo_link();
-
- if (REFERENCE_TRAP_P(trap_value))
+ if (REFERENCE_TRAP_P(value))
 {
- return_value = make_fake_uuo_link(extension, block, offset);
+ return (make_fake_uuo_link(extension, block, offset));
 }
 else
 {
- return_value = make_uuo_link(trap_value, extension, block, offset);
+ return (make_uuo_link(value, extension, block, offset));
 }
- break;
 }
 }
-
- remove_lock(set_serializer);
- return (return_value);
+ /*NOTREACHED*/
}
\f
+/* NOTE(review): first_time appears to be true when caching at link
+   time and false when recaching from a reference trap (see the
+   callers); it is forwarded unchanged to compiler_cache -- confirm. */
-/* This procedure invokes cache_reference after finding the top-level
+/* This procedure invokes compiler_cache after finding the top-level
 value cell associated with (env, name).
 */
long
-compiler_cache_reference(env, name, block, offset, kind)
+compiler_cache_reference(env, name, block, offset, kind, first_time)
 Pointer env, name, block;
 long offset, kind;
+ Boolean first_time;
{
 Pointer *cell;
- cell = deep_lookup(env, name, fake_variable_object);
+ cell = deep_lookup(env, name, compiler_cache_variable);
 if (cell == unbound_trap_object)
 {
 long message;
 return (message);
 }
 }
- return (compiler_cache(cell, name, block, offset, kind));
+ return (compiler_cache(cell, env, name, block, offset, kind, first_time));
}
-
+\f
/* This procedure updates all the references in the cached reference
list pointed at by slot to hold value. It also eliminates "empty"
pairs (pairs whose weakly held block has vanished).
return (PRIM_DONE);
}
\f
+extern Pointer compiled_block_environment();
+
+/* Maps a loop index (0 .. TRAP_MAP_TABLE_SIZE-1) to the slot index of
+   the corresponding reference list in a trap-references object;
+   iterated over by compiler_uncache and compiler_recache. */
+
+static long
+ trap_map_table[] = {
+ TRAP_REFERENCES_LOOKUP,
+ TRAP_REFERENCES_ASSIGNMENT,
+ TRAP_REFERENCES_OPERATOR
+ };
+
+#define TRAP_MAP_TABLE_SIZE (sizeof(trap_map_table) / sizeof(long))
+
+#ifndef DEFINITION_RECACHES_EAGERLY
+
/* compiler_uncache_slot uncaches all references in the list pointed
at by slot, and clears the list. If the references are operator
references, a fake compiled procedure which will recache when
- invoke is created and installed.
+ invoked is created and installed.
*/
long
*Free++ = sym;
*Free++ = block;
*Free++ = offset;
+\f
if (kind == TRAP_REFERENCES_OPERATOR)
{
extern long make_fake_uuo_link();
sym is the name of the variable.
*/
-static long trap_map_table[] =
- { TRAP_REFERENCES_LOOKUP,
- TRAP_REFERENCES_ASSIGNMENT,
- TRAP_REFERENCES_OPERATOR};
-
-extern long compiler_uncache();
-
long
compiler_uncache(value_cell, sym)
Pointer *value_cell, sym;
return (PRIM_DONE);
}
+ compiler_uncache_prolog();
+
extension = Fast_Vector_Ref(val, TRAP_EXTRA);
references = Fast_Vector_Ref(extension, TRAP_EXTENSION_REFERENCES);
update_lock(set_serializer, Nth_Vector_Loc(extension, TRAP_EXTENSION_CELL));
/* Uncache all of the lists. */
- for (i = 0; i < 3; i++)
+ for (i = TRAP_MAP_TABLE_SIZE; --i >= 0; )
{
index = trap_map_table[i];
temp = compiler_uncache_slot(Nth_Vector_Loc(references, index),
if (temp != PRIM_DONE)
{
remove_lock(set_serializer);
+ compiler_uncache_epilog();
return (temp);
}
}
/* Remove the clone extension if there is one. */
Fast_Vector_Set(extension, TRAP_EXTENSION_CLONE, NIL);
+ compiler_uncache_epilog();
remove_lock(set_serializer);
return (PRIM_DONE);
}
+
+#endif /* DEFINITION_RECACHES_EAGERLY */
+\f
+#ifdef DEFINITION_RECACHES_EAGERLY
+
+/*
+ compiler_recache is invoked when a redefinition occurs. It
+ recaches (at the definition point) all the references that need to
+ point to the new cell.
+
+ It does this in two phases:
+
+ - First (by means of compiler_recache_split) it splits all
+ references into those that need to be updated and those that do
+ not. This is done by side-effecting the list so that all those
+ that need updating are at the end, and when we actually decide to
+ go ahead, we can just clip it and install it in the new location.
+ compiler_recache_split also counts how many entries are affected,
+ so the total amount of gc space needed can be computed.
+
+ - After checking that there is enough space to proceed, (rather
+ than aborting) it actually does the recaching. It caches to the
+ new location/value by using compiler_recache_slot. Note that the
+ eventual trap extension has already been allocated so the recached
+ links can point to it.
+ */
+
+/* Required by compiler_uncache macro (see lookup.h): under
+   DEFINITION_RECACHES_EAGERLY it records the value cell shadowed by
+   the latest definition, to be consumed by shadowing_recache /
+   compiler_recache. */
+
+Pointer *shadowed_value_cell = ((Pointer *) NULL);
+
+/* Each extension is a hunk4. */
+
+#define SPACE_PER_EXTENSION 4
+
+/* Trap, extension, and one cache-list hunk. */
+
+#define SPACE_PER_TRAP (2 + SPACE_PER_EXTENSION + 3)
+
+/* 1 Pair and 1 Weak pair.
+ Not really needed since the pairs and weak pairs are reused.
+ */
+
+#define SPACE_PER_ENTRY (2 + 2)
+
+/* Hopefully a conservative guesstimate of the space consumed by one
+   uuo link. */
+
+#ifndef SPACE_PER_LINK /* So it can be overridden from config.h */
+#define SPACE_PER_LINK 10
+#endif /* SPACE_PER_LINK */
+\f
+/* The spaces are 0 because the pairs are reused! If that ever changes,
+ they should all become SPACE_PER_ENTRY + current value.
+ */
+
+#define SPACE_PER_LOOKUP 0
+#define SPACE_PER_ASSIGNMENT 0
+#define SPACE_PER_OPERATOR (0 + SPACE_PER_LINK)
+
+/* Space to allocate per recached entry of each kind; indexed in the
+   same order as trap_map_table. */
+
+static long
+ trap_size_table[TRAP_MAP_TABLE_SIZE] = {
+ SPACE_PER_LOOKUP,
+ SPACE_PER_ASSIGNMENT,
+ SPACE_PER_OPERATOR
+ };
+
+/* 1 if a nonempty reference list of this kind contributes to a clone
+   conflict; compiler_recache needs a clone extension when the total
+   reaches 2 (assignments and operators both present). */
+
+static long
+ trap_conflict_table[TRAP_MAP_TABLE_SIZE] = {
+ 0, /* lookup */
+ 1, /* assignment */
+ 1 /* operator */
+ };
+
+/* True iff `ancestor' is `descendant' itself or appears somewhere in
+   descendant's environment chain (followed through each frame's
+   procedure and its closing environment).  The walk stops at the
+   global environment, which matches only when ancestor is that same
+   environment. */
+Boolean
+environment_ancestor_or_self_p(ancestor, descendant)
+ fast Pointer ancestor, descendant;
+{
+ while (OBJECT_TYPE(descendant) != GLOBAL_ENV)
+ {
+ if (descendant == ancestor)
+ return (true);
+ descendant = Fast_Vector_Ref(Vector_Ref(descendant,
+ ENVIRONMENT_FUNCTION),
+ PROCEDURE_ENVIRONMENT);
+ }
+ return (descendant == ancestor);
+}
+\f
+/* This reorders the entries in slot so that the entries that are
+ not affected by the redefinition appear first, and the affected
+ ones appear last. A pointer to the first affected cell is stored
+ in memoize_cell, and this will be given to compiler_recache_slot
+ in order to avoid recomputing the division.
+
+ Note: There is an implicit assumption throughout that none of the
+ pairs (or weak pairs) are in pure space. If they are, they cannot
+ be sorted or reused.
+ */
+
+/* Returns the number of affected (to-be-recached) entries. */
+
+long
+compiler_recache_split(slot, sym, definition_env, memoize_cell)
+ fast Pointer *slot;
+ Pointer sym, definition_env, **memoize_cell;
+{
+ fast long count;
+ Pointer weak_pair, block, reference_env, invalid_head;
+ fast Pointer *last_invalid;
+
+ count = 0;
+ last_invalid = &invalid_head;
+
+ while (*slot != NIL)
+ {
+ weak_pair = Fast_Vector_Ref(*slot, CONS_CAR);
+ block = Fast_Vector_Ref(weak_pair, CONS_CAR);
+ if (block == NIL)
+ {
+ /* The weakly held code block was garbage collected; drop it. */
+ *slot = Fast_Vector_Ref(*slot, CONS_CDR);
+ continue;
+ }
+ reference_env = compiled_block_environment(block);
+ if (!environment_ancestor_or_self_p(definition_env, reference_env))
+ {
+ /* Unaffected by the redefinition; leave it in place. */
+ slot = Nth_Vector_Loc(*slot, CONS_CDR);
+ }
+ else
+ {
+ /* Affected: unlink it and append it to the invalid list. */
+ count += 1;
+ *last_invalid = *slot;
+ last_invalid = Nth_Vector_Loc(*slot, CONS_CDR);
+ *slot = *last_invalid;
+ }
+ }
+ *last_invalid = NIL;
+ /* Splice the affected entries back at the tail and remember where
+    they start, so compiler_recache_slot need not redo the split. */
+ *memoize_cell = slot;
+ *slot = invalid_head;
+ return (count);
+}
+\f
+/* This recaches the entries pointed out by cell and adds them
+ to the list in slot. It also sets to NIL the contents
+ of cell.
+
+ Note that this reuses the pairs and weak pairs that used to be
+ in cell.
+ */
+
+/* compiler_recache_slot: recache onto the new extension every entry
+   in the list in `cell' (the affected entries found by
+   compiler_recache_split), appending them to the list in `slot' and
+   leaving `cell's list consumed.  The pairs and weak pairs are
+   reused, not reallocated.  Returns PRIM_DONE, or the failing code
+   from cache_reference_end (in which case `slot' is restored). */
+
+long
+compiler_recache_slot(extension, sym, kind, slot, cell, value)
+ Pointer extension, sym, value;
+ fast Pointer *slot, *cell;
+ long kind;
+{
+ fast Pointer pair, weak_pair;
+ Pointer clone, tail;
+ long result;
+
+ /* This is NIL if there isn't one.
+ This makes cache_reference_end do the right thing.
+ */
+ clone = Fast_Vector_Ref(extension, TRAP_EXTENSION_CLONE);
+ tail = *slot;
+
+ /* The list is NIL-terminated (see compiler_recache_split), so the
+    loop must test against NIL, not the C NULL pointer. */
+ for (pair = *cell; pair != NIL; pair = *cell)
+ {
+ weak_pair = Fast_Vector_Ref(pair, CONS_CAR);
+ result = cache_reference_end(kind, extension, clone,
+ Fast_Vector_Ref(weak_pair, CONS_CAR),
+ Get_Integer(Fast_Vector_Ref(weak_pair,
+ CONS_CDR)),
+ value);
+ if (result != PRIM_DONE)
+ {
+ /* We are severely screwed.
+ compiler_recache will do the appropriate thing.
+ */
+ *slot = tail;
+ return (result);
+ }
+
+ *slot = pair;
+ slot = Nth_Vector_Loc(pair, CONS_CDR);
+ *cell = *slot;
+ }
+ *slot = tail;
+ return (PRIM_DONE);
+}
+\f
+/* compiler_recache: see the description at the head of this section.
+   Invoked on redefinition; repoints all affected compiled-code
+   references at the new value cell, under both cell locks.  Returns
+   PRIM_DONE, PRIM_INTERRUPT (GC needed), or the result of
+   definition/redefinition. */
+long
+compiler_recache(old_value_cell, new_value_cell, env, sym, value,
+ shadowed_p, link_p)
+ Pointer *old_value_cell, *new_value_cell, env, sym, value;
+ Boolean shadowed_p, link_p;
+{
+ Lock_Handle set_serializer_1, set_serializer_2;
+ Pointer
+ old_value, references, extension, new_extension, new_trap,
+ *trap_info_table[TRAP_MAP_TABLE_SIZE];
+ long
+ trap_kind, temp, i, index, total_size, total_count, conflict_count;
+
+ setup_locks(set_serializer_1, old_value_cell,
+ set_serializer_2, new_value_cell);
+
+ if ((!link_p) && (*new_value_cell != DANGEROUS_UNBOUND_OBJECT))
+ {
+ /* Another processor has redefined this word in the meantime.
+ The other processor must have recached all the compiled code
+ caches since it is shadowing the same variable.
+ The definition has become a redefinition.
+ */
+ remove_locks(set_serializer_1, set_serializer_2);
+ return (redefinition(new_value_cell, value));
+ }
+
+ old_value = *old_value_cell;
+
+ if (!(REFERENCE_TRAP_P(old_value)))
+ {
+ remove_locks(set_serializer_1, set_serializer_2);
+ return (link_p ?
+ PRIM_DONE :
+ definition(new_value_cell, value, shadowed_p));
+ }
+
+ get_trap_kind(trap_kind, old_value);
+ if ((trap_kind != TRAP_COMPILER_CACHED) &&
+ (trap_kind != TRAP_COMPILER_CACHED_DANGEROUS))
+ {
+ remove_locks(set_serializer_1, set_serializer_2);
+ return (link_p ?
+ PRIM_DONE :
+ definition(new_value_cell, value, shadowed_p));
+ }
+
+ compiler_recache_prolog();
+
+ extension = Fast_Vector_Ref(old_value, TRAP_EXTRA);
+ references = Fast_Vector_Ref(extension, TRAP_EXTENSION_REFERENCES);
+ update_lock(set_serializer_1,
+ Nth_Vector_Loc(extension, TRAP_EXTENSION_CELL));
+\f
+ /*
+ Split each slot and compute the amount to allocate.
+ */
+
+ conflict_count = 0;
+ total_size = (link_p ? 0 : SPACE_PER_TRAP);
+ total_count = 0;
+
+ for (i = TRAP_MAP_TABLE_SIZE; --i >= 0; )
+ {
+ index = trap_map_table[i];
+ temp = compiler_recache_split(Nth_Vector_Loc(references, index),
+ sym, env, &trap_info_table[i]);
+
+ if (temp != 0)
+ {
+ conflict_count += trap_conflict_table[i];
+ total_size += (temp * trap_size_table[i]);
+ total_count += temp;
+ }
+ }
+
+ if (total_count == 0)
+ {
+ compiler_recache_epilog();
+ remove_locks(set_serializer_1, set_serializer_2);
+ return (link_p ?
+ PRIM_DONE :
+ definition(new_value_cell, value, shadowed_p));
+ }
+
+ if ((conflict_count == 2) &&
+ ((!link_p) ||
+ (new_value_cell[TRAP_EXTENSION_CLONE] == NIL)))
+ {
+ total_size += SPACE_PER_EXTENSION;
+ }
+
+ if (GC_allocate_test(total_size))
+ {
+ /* Unfortunate fact of life: This binding will be dangerous
+ even if there is no need, but this is the only way to
+ guarantee consistent values.
+ */
+ compiler_recache_epilog();
+ remove_locks(set_serializer_1, set_serializer_2);
+ Request_GC(total_size);
+ return (PRIM_INTERRUPT);
+ }
+\f
+ /*
+ Allocate and initialize all the cache structures if necessary.
+ */
+
+ if (link_p)
+ {
+ new_extension = Make_Pointer(TRAP_EXTENSION_TYPE, new_value_cell);
+ references = new_value_cell[TRAP_EXTENSION_REFERENCES];
+ }
+ else
+ {
+ /* The reference trap is created here, but is not installed in the
+ environment structure until the end. The new binding contains
+ a DANGEROUS_UNBOUND_OBJECT so that other parallel lookups will
+ skip this binding.
+ */
+
+ references = Make_Pointer(TRAP_REFERENCES_TYPE, Free);
+
+ *Free++ = NIL;
+ *Free++ = NIL;
+ *Free++ = NIL;
+
+ new_extension = Make_Pointer(TRAP_EXTENSION_TYPE, Free);
+
+ *Free++ = value;
+ *Free++ = sym;
+ *Free++ = NIL;
+ *Free++ = references;
+
+ new_trap = Make_Pointer(TC_REFERENCE_TRAP, Free);
+ *Free++ = Make_Unsigned_Fixnum((shadowed_p ?
+ TRAP_COMPILER_CACHED_DANGEROUS :
+ TRAP_COMPILER_CACHED));
+ *Free++ = new_extension;
+ }
+
+ if ((conflict_count == 2) &&
+ (Vector_Ref(new_extension, TRAP_EXTENSION_CLONE) == NIL))
+ {
+ Pointer clone;
+
+ clone = Make_Pointer(TRAP_EXTENSION_TYPE, Free);
+
+ *Free++ = EXPENSIVE_ASSIGNMENT_OBJECT;
+ *Free++ = sym;
+ *Free++ = new_extension;
+ *Free++ = references;
+ Fast_Vector_Set(new_extension, TRAP_EXTENSION_CLONE, clone);
+ }
+\f
+ /*
+ Now we actually perform the recaching, allocating freely.
+ */
+
+ for (i = TRAP_MAP_TABLE_SIZE; --i >= 0; )
+ {
+ index = trap_map_table[i];
+ temp = compiler_recache_slot(new_extension, sym, index,
+ Nth_Vector_Loc(references, index),
+ trap_info_table[i],
+ value);
+ if (temp != PRIM_DONE)
+ {
+ extern char *Abort_Names[], *Error_Names[];
+
+ /* We've lost BIG. */
+
+ if (temp == PRIM_INTERRUPT)
+ fprintf(stderr,
+ "\ncompiler_recache: Ran out of guaranteed space!\n");
+ else if (temp > 0)
+ /* Positive codes are errors: use Error_Names (Abort_Names
+    holds the abort strings), and %ld since temp is a long. */
+ fprintf(stderr,
+ "\ncompiler_recache: Unexpected error value %ld (%s)\n",
+ temp, Error_Names[temp]);
+ else
+ fprintf(stderr,
+ "\ncompiler_recache: Unexpected abort value %ld (%s)\n",
+ -temp, Abort_Names[(-temp) - 1]);
+ Microcode_Termination(TERM_EXIT);
+ }
+ }
+
+ if (!link_p)
+ {
+ *new_value_cell = new_trap;
+ }
+ compiler_recache_epilog();
+ remove_locks(set_serializer_1, set_serializer_2);
+ return (PRIM_DONE);
+}
+
+#endif /* DEFINITION_RECACHES_EAGERLY */
\f
/* recache_uuo_links is invoked when an assignment occurs to a
variable which has cached operator references (uuo links).
fast Pointer *slot;
long return_value;
+ update_uuo_prolog();
references = Fast_Vector_Ref(extension, TRAP_EXTENSION_REFERENCES);
slot = Nth_Vector_Loc(references, TRAP_REFERENCES_OPERATOR);
Get_Integer(Fast_Vector_Ref(pair, CONS_CDR)));
if (return_value != PRIM_DONE)
{
+ update_uuo_epilog();
return (return_value);
}
slot = Nth_Vector_Loc(*slot, CONS_CDR);
fix_references(Nth_Vector_Loc(references, TRAP_REFERENCES_ASSIGNMENT),
extension);
}
+ update_uuo_epilog();
return (PRIM_DONE);
}
\f
Otherwise the reference is done normally, and the process continued.
*/
-extern Pointer compiled_block_environment();
-
long
compiler_reference_trap(extension, kind, handler)
Pointer extension;
long offset, temp;
Pointer block;
+try_again:
+
if (Vector_Ref(extension, TRAP_EXTENSION_CELL) != REQUEST_RECACHE_OBJECT)
{
return ((*handler)(Nth_Vector_Loc(extension, TRAP_EXTENSION_CELL),
block = Fast_Vector_Ref(extension, TRAP_EXTENSION_BLOCK);
offset = Get_Integer(Fast_Vector_Ref(extension, TRAP_EXTENSION_OFFSET));
+
+ compiler_trap_prolog();
temp =
compiler_cache_reference(compiled_block_environment(block),
Fast_Vector_Ref(extension, TRAP_EXTENSION_NAME),
- block,
- offset,
- kind);
+ block, offset, kind, false);
+ compiler_trap_epilog();
if (temp != PRIM_DONE)
{
return (temp);
{
case TRAP_REFERENCES_OPERATOR:
{
+
/* Note that this value may cause another operator trap when
invoked, since it may be a uuo-link to an interpreted
- procedure, or to a variable with a trap in it. It should not
- go into a loop however, because the reference will be cached
- to the correct place, so the extension will no longer have a
- REQUEST_RECACHE_OBJECT in it. The first branch in this
- procedure will be taken in this case.
+ procedure, or to a variable with a trap in it. However, it
+ should not go into a loop because the reference should be
+ cached to the correct place, so the extension will no longer
+ have a REQUEST_RECACHE_OBJECT in it. The first branch in
+ this procedure will be taken in this case. On a
+ multiprocessor it may in fact loop if some other processor
+ redefines the variable before we have a chance to invoke the
+ value.
*/
extern Pointer extract_uuo_link();
default:
{
extern Pointer extract_variable_cache();
- Pointer extension;
extension = extract_variable_cache(block, offset);
- return ((*handler)(Nth_Vector_Loc(extension, TRAP_EXTENSION_CELL),
- fake_variable_object));
+ /* This is paranoid on a single processor, but it does not hurt.
+ On a multiprocessor, we need to do it because some other processor
+ may have redefined this variable in the meantime.
+ */
+ goto try_again;
}
}
}
{
return (compiler_cache_reference(compiled_block_environment(block),
name, block, offset,
- TRAP_REFERENCES_LOOKUP));
+ TRAP_REFERENCES_LOOKUP, true));
}
long
{
return (compiler_cache_reference(compiled_block_environment(block),
name, block, offset,
- TRAP_REFERENCES_ASSIGNMENT));
+ TRAP_REFERENCES_ASSIGNMENT, true));
}
long
{
return (compiler_cache_reference(compiled_block_environment(block),
name, block, offset,
- TRAP_REFERENCES_OPERATOR));
+ TRAP_REFERENCES_OPERATOR, true));
}
\f
extern long complr_operator_reference_trap();
promotional, or sales literature without prior written consent from
MIT in each case. */
-/* $Header: /Users/cph/tmp/foo/mit-scheme/mit-scheme/v8/src/microcode/lookup.h,v 9.40 1988/05/03 19:21:57 jinx Exp $ */
+/* $Header: /Users/cph/tmp/foo/mit-scheme/mit-scheme/v8/src/microcode/lookup.h,v 9.41 1988/09/29 05:02:21 jinx Exp $ */
/* Macros and declarations for the variable lookup code. */
#define setup_lock(handle, cell) handle = Lock_Cell(cell)
#define remove_lock(handle) Unlock_Cell(handle)
+\f
+/* Acquire two cell locks in a canonical order, chosen by LOCK_FIRST,
+   so that two processors locking the same pair of cells cannot
+   deadlock ("deadly embrace").  This matters especially if whole
+   contiguous regions are locked, rather than individual words.
+ */
-#else
+#define setup_locks(hand1, cel1, hand2, cel2) \
+{ \
+ if (LOCK_FIRST(cel1, cel2)) \
+ { \
+ setup_lock(hand1, cel1); \
+ setup_lock(hand2, cel2); \
+ } \
+ else \
+ { \
+ setup_lock(hand2, cel2); \
+ setup_lock(hand1, cel1); \
+ } \
+}
+
+#define remove_locks(hand1, hand2) \
+{ \
+ remove_lock(hand2); \
+ remove_lock(hand1); \
+}
+
+#else /* not PARALLEL_PROCESSOR */
#define verify(type_code, variable, code, label)
#define verified_offset(variable, code) code
#define setup_lock(handle, cell)
#define remove_lock(ignore)
+#define setup_locks(hand1, cel1, hand2, cel2)
+#define remove_locks(ign1, ign2)
-#endif
+#endif /* PARALLEL_PROCESSOR */
/* This is provided as a separate macro so that it can be made
atomic if necessary.
cell = Nth_Vector_Loc(frame, CONS_CDR); \
break; \
}
+\f
+/* Macros and exports for incremental definition and hooks. */
+
+extern long extend_frame();
+
+#ifndef DEFINITION_RECACHES_EAGERLY
+
+/* Lazy strategy: a definition uncaches the shadowed compiled-code
+   references (compiler_uncache in lookup.c) and lets them recache
+   when next used. */
+
+extern long compiler_uncache();
+
+#define simple_uncache(cell, sym) PRIM_DONE
+
+#define shadowing_recache(cell, env, sym, value, shadowed_p) \
+ definition(cell, value, shadowed_p)
+
+#define compiler_recache(old, new, env, sym, val, shadowed_p, link_p) \
+ PRIM_DONE
+
+#else /* DEFINITION_RECACHES_EAGERLY */
+
+/* Eager strategy: compiler_uncache merely records the shadowed value
+   cell in shadowed_value_cell, and shadowing_recache immediately
+   repoints the affected references via compiler_recache. */
+
+extern long compiler_recache();
+
+extern Pointer *shadowed_value_cell;
+
+#define compiler_uncache(cell, sym) \
+ (shadowed_value_cell = cell, PRIM_DONE)
+
+#define simple_uncache(cell, sym) \
+ compiler_uncache(cell, sym)
+
+#define shadowing_recache(cell, env, sym, value, shadowed_p) \
+ compiler_recache(shadowed_value_cell, cell, env, sym, value, \
+ shadowed_p, false)
+
+#endif /* DEFINITION_RECACHES_EAGERLY */