Load the fallback into rax so caller needs no conditional branch.
author: Taylor R Campbell <campbell@mumble.net>
Wed, 2 Jan 2019 06:10:52 +0000 (06:10 +0000)
committer: Taylor R Campbell <campbell@mumble.net>
Tue, 13 Aug 2019 14:37:02 +0000 (14:37 +0000)
WARNING: This changes the amd64 compiled code interface so that new
compiled code requires a new microcode.  (However, a new microcode
should handle old compiled code without trouble, since old compiled
code treats rax as garbage at this point, and LEA does not affect
flags.)

src/compiler/machines/x86-64/rules3.scm
src/microcode/cmpauxmd/x86-64.m4

index 787efe41c696c84d690b011e83abbd6be14e836d..3193f00e66ef737b4ee354ea4b72380aad34e3a8 100644 (file)
@@ -110,10 +110,7 @@ USA.
             (else
              (LAP (MOV Q (R ,rdx) (&U ,frame-size))
                   ,@(invoke-hook/subroutine entry:compiler-apply-setup))))
-        (JNE (@PCR ,generic))
-        (JMP (R ,rax))
-       (LABEL ,generic)
-        ,@(invoke-hook entry:compiler-shortcircuit-apply))))
+        (JMP (R ,rax)))))
 
 (define-rule statement
   (INVOCATION:JUMP (? frame-size) (? continuation) (? label))
index f98d8426c0a8d7ef22d1bf448c05173cbd01f45e..38dbabaf6e24f7f62abd944a599a94dda8f2ad01 100644 (file)
@@ -613,6 +613,9 @@ define_hook_label(apply_setup)
        ret
 
 asm_apply_setup_fail:
+       # Load the fallback address into rax, where compiled code will
+       # jump.
+       OP(lea,q)       TW(ABS(EVR(asm_sc_apply_generic)),REG(rax))
        ret
 
 define(define_apply_setup_fixed_size,
@@ -631,6 +634,7 @@ define_hook_label(apply_setup_size_$1)
        ret
 
 asm_apply_setup_size_$1_fail:
+       OP(lea,q)       TW(ABS(EVR(asm_sc_apply_generic)),REG(rax))
        OP(mov,q)       TW(IMM(HEX($1)),REG(rdx))
        ret')