From 473bb95ce6b7b4e0f588ba2c1ee13efc35e4a7c7 Mon Sep 17 00:00:00 2001
From: Packit Service
Date: Dec 09 2020 15:18:08 +0000
Subject: Prepare for a new update

Reverting patches so we can apply the latest update and changes can be seen in the spec file and sources.

---

diff --git a/mpn/Makeasm.am b/mpn/Makeasm.am
index d70c5cd..5d7306c 100644
--- a/mpn/Makeasm.am
+++ b/mpn/Makeasm.am
@@ -66,7 +66,7 @@ SUFFIXES = .s .S .asm
 # can be overridden during development, eg. "make RM_TMP=: mul_1.lo"
-RM_TMP = true
+RM_TMP = rm -f
 # .S assembler, preprocessed with cpp.
diff --git a/mpn/m4-ccas b/mpn/m4-ccas
index 22de52a..16d80c6 100755
--- a/mpn/m4-ccas
+++ b/mpn/m4-ccas
@@ -104,4 +104,4 @@ echo "$CC"
 $CC || exit
 # Comment this out to preserve .s intermediates
-#rm -f $TMP
+rm -f $TMP
diff --git a/mpn/x86_64/addaddmul_1msb0.asm b/mpn/x86_64/addaddmul_1msb0.asm
index 2bfa122..87c21b4 100644
--- a/mpn/x86_64/addaddmul_1msb0.asm
+++ b/mpn/x86_64/addaddmul_1msb0.asm
@@ -168,4 +168,3 @@ L(end): cmp $1, R32(n)
 pop %r12
 ret
 EPILOGUE()
-CF_PROT
diff --git a/mpn/x86_64/addmul_2.asm b/mpn/x86_64/addmul_2.asm
index 2999ce5..18307d7 100644
--- a/mpn/x86_64/addmul_2.asm
+++ b/mpn/x86_64/addmul_2.asm
@@ -182,4 +182,3 @@ L(end): xor R32(w1), R32(w1)
 FUNC_EXIT()
 ret
 EPILOGUE()
-CF_PROT
diff --git a/mpn/x86_64/aorrlsh1_n.asm b/mpn/x86_64/aorrlsh1_n.asm
index 9ebd7dc..6ee0872 100644
--- a/mpn/x86_64/aorrlsh1_n.asm
+++ b/mpn/x86_64/aorrlsh1_n.asm
@@ -168,4 +168,3 @@ ifdef(`OPERATION_rsblsh1_n',`
 FUNC_EXIT()
 ret
 EPILOGUE()
-CF_PROT
diff --git a/mpn/x86_64/aorrlshC_n.asm b/mpn/x86_64/aorrlshC_n.asm
index c3d55a6..5a9fd4d 100644
--- a/mpn/x86_64/aorrlshC_n.asm
+++ b/mpn/x86_64/aorrlshC_n.asm
@@ -158,4 +158,3 @@ ifelse(ADDSUB,add,`
 FUNC_EXIT()
 ret
 EPILOGUE()
-CF_PROT
diff --git a/mpn/x86_64/aorrlsh_n.asm b/mpn/x86_64/aorrlsh_n.asm
index 7dd0bcf..5ca128f 100644
--- a/mpn/x86_64/aorrlsh_n.asm
+++ b/mpn/x86_64/aorrlsh_n.asm
@@ -174,4 +174,3 @@ L(end): add R32(%rbx), R32(%rbx)
 FUNC_EXIT()
 ret
 EPILOGUE()
-CF_PROT
diff --git a/mpn/x86_64/aors_err1_n.asm b/mpn/x86_64/aors_err1_n.asm
index 13a6af2..54d0b3f 100644
--- a/mpn/x86_64/aors_err1_n.asm
+++ b/mpn/x86_64/aors_err1_n.asm
@@ -223,4 +223,3 @@ L(end):
 pop %rbx
 ret
 EPILOGUE()
-CF_PROT
diff --git a/mpn/x86_64/aors_err2_n.asm b/mpn/x86_64/aors_err2_n.asm
index 0466f06..ce5c2a4 100644
--- a/mpn/x86_64/aors_err2_n.asm
+++ b/mpn/x86_64/aors_err2_n.asm
@@ -170,4 +170,3 @@ L(end):
 pop %rbx
 ret
 EPILOGUE()
-CF_PROT
diff --git a/mpn/x86_64/aors_err3_n.asm b/mpn/x86_64/aors_err3_n.asm
index cc5461f..bb6d0c5 100644
--- a/mpn/x86_64/aors_err3_n.asm
+++ b/mpn/x86_64/aors_err3_n.asm
@@ -154,4 +154,3 @@ L(end):
 pop %rbx
 ret
 EPILOGUE()
-CF_PROT
diff --git a/mpn/x86_64/aors_n.asm b/mpn/x86_64/aors_n.asm
index 361e04d..8941f7a 100644
--- a/mpn/x86_64/aors_n.asm
+++ b/mpn/x86_64/aors_n.asm
@@ -167,4 +167,3 @@ L(end): lea 32(up), up
 FUNC_EXIT()
 ret
 EPILOGUE()
-CF_PROT
diff --git a/mpn/x86_64/aorsmul_1.asm b/mpn/x86_64/aorsmul_1.asm
index 25d0c13..e3fc005 100644
--- a/mpn/x86_64/aorsmul_1.asm
+++ b/mpn/x86_64/aorsmul_1.asm
@@ -178,4 +178,3 @@ IFDOS(``pop %rdi '')
 IFDOS(``pop %rsi '')
 ret
 EPILOGUE()
-CF_PROT
diff --git a/mpn/x86_64/atom/addmul_2.asm b/mpn/x86_64/atom/addmul_2.asm
index 07ae7b8..c1dcdc4 100644
--- a/mpn/x86_64/atom/addmul_2.asm
+++ b/mpn/x86_64/atom/addmul_2.asm
@@ -184,4 +184,3 @@ L(end): mul v1
 FUNC_EXIT()
 ret
 EPILOGUE()
-CF_PROT
diff --git a/mpn/x86_64/atom/aorrlsh1_n.asm b/mpn/x86_64/atom/aorrlsh1_n.asm
index f9d7bac..f44de19 100644
--- a/mpn/x86_64/atom/aorrlsh1_n.asm
+++
b/mpn/x86_64/atom/aorrlsh1_n.asm @@ -236,4 +236,3 @@ IFDOS(` mov 56(%rsp), %r8 ') sbb R32(%rbp), R32(%rbp) C save acy jmp L(ent) EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/atom/aorrlsh2_n.asm b/mpn/x86_64/atom/aorrlsh2_n.asm index 5ea55b4..02fb29d 100644 --- a/mpn/x86_64/atom/aorrlsh2_n.asm +++ b/mpn/x86_64/atom/aorrlsh2_n.asm @@ -189,4 +189,3 @@ ifdef(`OPERATION_rsblsh2_n',` FUNC_EXIT() ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/atom/aorsmul_1.asm b/mpn/x86_64/atom/aorsmul_1.asm index 6a12f96..e953153 100644 --- a/mpn/x86_64/atom/aorsmul_1.asm +++ b/mpn/x86_64/atom/aorsmul_1.asm @@ -188,4 +188,3 @@ L(cj1): ADDSUB %rax, (rp,n,8) ret EPILOGUE() ASM_END() -CF_PROT diff --git a/mpn/x86_64/atom/lshift.asm b/mpn/x86_64/atom/lshift.asm index 15786cb..1b37d5d 100644 --- a/mpn/x86_64/atom/lshift.asm +++ b/mpn/x86_64/atom/lshift.asm @@ -121,4 +121,3 @@ L(end): shl R8(%rcx), %r10 FUNC_EXIT() ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/atom/lshiftc.asm b/mpn/x86_64/atom/lshiftc.asm index 3171d3c..7385f8f 100644 --- a/mpn/x86_64/atom/lshiftc.asm +++ b/mpn/x86_64/atom/lshiftc.asm @@ -125,4 +125,3 @@ L(end): shl R8(%rcx), %r10 FUNC_EXIT() ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/atom/mul_1.asm b/mpn/x86_64/atom/mul_1.asm index 304c208..d76a3d3 100644 --- a/mpn/x86_64/atom/mul_1.asm +++ b/mpn/x86_64/atom/mul_1.asm @@ -141,4 +141,3 @@ IFDOS(` mov 56(%rsp), %r8 ') jmp L(com) EPILOGUE() ASM_END() -CF_PROT diff --git a/mpn/x86_64/atom/mul_2.asm b/mpn/x86_64/atom/mul_2.asm index c7b78a7..f3fc3af 100644 --- a/mpn/x86_64/atom/mul_2.asm +++ b/mpn/x86_64/atom/mul_2.asm @@ -184,4 +184,3 @@ L(end): mul v1 FUNC_EXIT() ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/atom/redc_1.asm b/mpn/x86_64/atom/redc_1.asm index eeb09d3..62b9a84 100644 --- a/mpn/x86_64/atom/redc_1.asm +++ b/mpn/x86_64/atom/redc_1.asm @@ -577,4 +577,3 @@ L(n4): mov -32(mp), %rax jmp L(cj) EPILOGUE() ASM_END() -CF_PROT diff --git a/mpn/x86_64/atom/rsh1aors_n.asm b/mpn/x86_64/atom/rsh1aors_n.asm index a589b89..6f5f638 100644 --- a/mpn/x86_64/atom/rsh1aors_n.asm +++ b/mpn/x86_64/atom/rsh1aors_n.asm @@ -285,4 +285,3 @@ L(cj1): pop %r15 FUNC_EXIT() ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/atom/rshift.asm b/mpn/x86_64/atom/rshift.asm index c8b78bf..29c027d 100644 --- a/mpn/x86_64/atom/rshift.asm +++ b/mpn/x86_64/atom/rshift.asm @@ -119,4 +119,3 @@ L(end): shr R8(cnt), %r10 FUNC_EXIT() ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/atom/sublsh1_n.asm b/mpn/x86_64/atom/sublsh1_n.asm index 574b25b..1306acd 100644 --- a/mpn/x86_64/atom/sublsh1_n.asm +++ b/mpn/x86_64/atom/sublsh1_n.asm @@ -240,4 +240,3 @@ IFDOS(` mov 56(%rsp), %r8 ') sbb R32(%rbp), R32(%rbp) C save acy jmp L(ent) EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/bd1/aorsmul_1.asm b/mpn/x86_64/bd1/aorsmul_1.asm index ce76154..96fec9f 100644 --- a/mpn/x86_64/bd1/aorsmul_1.asm +++ b/mpn/x86_64/bd1/aorsmul_1.asm @@ -179,4 +179,3 @@ IFDOS(``pop %rsi '') ret EPILOGUE() ASM_END() -CF_PROT diff --git a/mpn/x86_64/bd1/mul_1.asm b/mpn/x86_64/bd1/mul_1.asm index 308f336..e59667c 100644 --- a/mpn/x86_64/bd1/mul_1.asm +++ b/mpn/x86_64/bd1/mul_1.asm @@ -182,4 +182,3 @@ IFDOS(``pop %rsi '') ret EPILOGUE() ASM_END() -CF_PROT diff --git a/mpn/x86_64/bd1/mul_2.asm b/mpn/x86_64/bd1/mul_2.asm index f40cf47..4ed5f30 100644 --- a/mpn/x86_64/bd1/mul_2.asm +++ b/mpn/x86_64/bd1/mul_2.asm @@ -190,4 +190,3 @@ L(end): mov -8(up,n,8), %rax FUNC_EXIT() ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/bd1/mul_basecase.asm b/mpn/x86_64/bd1/mul_basecase.asm index 6d61cbc..e47ba58 100644 --- 
a/mpn/x86_64/bd1/mul_basecase.asm +++ b/mpn/x86_64/bd1/mul_basecase.asm @@ -414,4 +414,3 @@ L(ret2):pop %rbp FUNC_EXIT() ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/bdiv_dbm1c.asm b/mpn/x86_64/bdiv_dbm1c.asm index f9c4aa0..a53bd52 100644 --- a/mpn/x86_64/bdiv_dbm1c.asm +++ b/mpn/x86_64/bdiv_dbm1c.asm @@ -104,4 +104,3 @@ L(lo1): sub %rax, %r8 FUNC_EXIT() ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/bdiv_q_1.asm b/mpn/x86_64/bdiv_q_1.asm index 7bfa66d..02eacbe 100644 --- a/mpn/x86_64/bdiv_q_1.asm +++ b/mpn/x86_64/bdiv_q_1.asm @@ -165,4 +165,3 @@ L(one): shr R8(%rcx), %rax FUNC_EXIT() ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/bobcat/aors_n.asm b/mpn/x86_64/bobcat/aors_n.asm index 1df1a08..22287b8 100644 --- a/mpn/x86_64/bobcat/aors_n.asm +++ b/mpn/x86_64/bobcat/aors_n.asm @@ -148,4 +148,3 @@ PROLOGUE(func_nc) IFDOS(` mov 56(%rsp), %r8 ') jmp L(ent) EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/bobcat/aorsmul_1.asm b/mpn/x86_64/bobcat/aorsmul_1.asm index 79d81f4..415a17c 100644 --- a/mpn/x86_64/bobcat/aorsmul_1.asm +++ b/mpn/x86_64/bobcat/aorsmul_1.asm @@ -181,4 +181,3 @@ IFDOS(` pop %rdi ') IFDOS(` pop %rsi ') ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/bobcat/copyd.asm b/mpn/x86_64/bobcat/copyd.asm index 2f781a3..877714e 100644 --- a/mpn/x86_64/bobcat/copyd.asm +++ b/mpn/x86_64/bobcat/copyd.asm @@ -89,4 +89,3 @@ L(end): cmp $-4, R32(n) L(ret): FUNC_EXIT() ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/bobcat/copyi.asm b/mpn/x86_64/bobcat/copyi.asm index ff249bc..ee0f578 100644 --- a/mpn/x86_64/bobcat/copyi.asm +++ b/mpn/x86_64/bobcat/copyi.asm @@ -92,4 +92,3 @@ L(end): cmp $4, R32(n) L(ret): FUNC_EXIT() ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/bobcat/mul_1.asm b/mpn/x86_64/bobcat/mul_1.asm index b4f401b..ab428a8 100644 --- a/mpn/x86_64/bobcat/mul_1.asm +++ b/mpn/x86_64/bobcat/mul_1.asm @@ -185,4 +185,3 @@ IFDOS(` pop %rdi ') IFDOS(` pop %rsi ') ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/bobcat/mul_basecase.asm b/mpn/x86_64/bobcat/mul_basecase.asm index 14c7b13..e7d46bf 100644 --- a/mpn/x86_64/bobcat/mul_basecase.asm +++ b/mpn/x86_64/bobcat/mul_basecase.asm @@ -484,4 +484,3 @@ L(ret): pop %r13 FUNC_EXIT() ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/bobcat/redc_1.asm b/mpn/x86_64/bobcat/redc_1.asm index d686cfb..d55b1e5 100644 --- a/mpn/x86_64/bobcat/redc_1.asm +++ b/mpn/x86_64/bobcat/redc_1.asm @@ -505,4 +505,3 @@ L(n3): mov -24(mp), %rax jmp L(ret) EPILOGUE() ASM_END() -CF_PROT diff --git a/mpn/x86_64/bobcat/sqr_basecase.asm b/mpn/x86_64/bobcat/sqr_basecase.asm index 5693c46..0e417a1 100644 --- a/mpn/x86_64/bobcat/sqr_basecase.asm +++ b/mpn/x86_64/bobcat/sqr_basecase.asm @@ -563,4 +563,3 @@ L(esd): add %rbx, w0 FUNC_EXIT() ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/cnd_aors_n.asm b/mpn/x86_64/cnd_aors_n.asm index 35f30e7..13a2ab3 100644 --- a/mpn/x86_64/cnd_aors_n.asm +++ b/mpn/x86_64/cnd_aors_n.asm @@ -181,4 +181,3 @@ L(end): neg R32(%rax) FUNC_EXIT() ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/com.asm b/mpn/x86_64/com.asm index 56b0747..006acaf 100644 --- a/mpn/x86_64/com.asm +++ b/mpn/x86_64/com.asm @@ -93,4 +93,3 @@ L(e10): movq 24(up,n,8), %r9 L(ret): FUNC_EXIT() ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/copyd.asm b/mpn/x86_64/copyd.asm index 020e287..a5e6e59 100644 --- a/mpn/x86_64/copyd.asm +++ b/mpn/x86_64/copyd.asm @@ -91,4 +91,3 @@ L(end): shr R32(n) mov %r9, -16(rp) 1: ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/copyi.asm b/mpn/x86_64/copyi.asm index 1a4fb6d..bafce7a 100644 --- a/mpn/x86_64/copyi.asm +++ b/mpn/x86_64/copyi.asm @@ 
-90,4 +90,3 @@ L(end): shr R32(n) mov %r9, 16(rp) 1: ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/core2/aors_err1_n.asm b/mpn/x86_64/core2/aors_err1_n.asm index 5162272..3f875ae 100644 --- a/mpn/x86_64/core2/aors_err1_n.asm +++ b/mpn/x86_64/core2/aors_err1_n.asm @@ -223,4 +223,3 @@ L(end): pop %rbx ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/core2/aors_n.asm b/mpn/x86_64/core2/aors_n.asm index 19078d8..74a1bce 100644 --- a/mpn/x86_64/core2/aors_n.asm +++ b/mpn/x86_64/core2/aors_n.asm @@ -139,4 +139,3 @@ IFDOS(` mov 56(%rsp), %r8 ') jmp L(start) EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/core2/aorsmul_1.asm b/mpn/x86_64/core2/aorsmul_1.asm index 392f4de..6b313dd 100644 --- a/mpn/x86_64/core2/aorsmul_1.asm +++ b/mpn/x86_64/core2/aorsmul_1.asm @@ -176,4 +176,3 @@ L(n1): mov 8(rp), %r10 FUNC_EXIT() ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/core2/divrem_1.asm b/mpn/x86_64/core2/divrem_1.asm index 0a67dc3..1b3f139 100644 --- a/mpn/x86_64/core2/divrem_1.asm +++ b/mpn/x86_64/core2/divrem_1.asm @@ -241,4 +241,3 @@ L(ret): pop %rbx FUNC_EXIT() ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/core2/gcd_1.asm b/mpn/x86_64/core2/gcd_1.asm index 452b763..bdb940c 100644 --- a/mpn/x86_64/core2/gcd_1.asm +++ b/mpn/x86_64/core2/gcd_1.asm @@ -144,4 +144,3 @@ L(end): pop %rcx FUNC_EXIT() ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/core2/lshift.asm b/mpn/x86_64/core2/lshift.asm index 00b39b8..8ccafec 100644 --- a/mpn/x86_64/core2/lshift.asm +++ b/mpn/x86_64/core2/lshift.asm @@ -147,4 +147,3 @@ L(end): shld R8(cnt), %r8, %r11 FUNC_EXIT() ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/core2/lshiftc.asm b/mpn/x86_64/core2/lshiftc.asm index 4d3acfe..65c7b2f 100644 --- a/mpn/x86_64/core2/lshiftc.asm +++ b/mpn/x86_64/core2/lshiftc.asm @@ -157,4 +157,3 @@ L(end): shld R8(cnt), %r8, %r11 FUNC_EXIT() ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/core2/mul_basecase.asm b/mpn/x86_64/core2/mul_basecase.asm index 04cd4c2..d16be85 100644 --- a/mpn/x86_64/core2/mul_basecase.asm +++ b/mpn/x86_64/core2/mul_basecase.asm @@ -973,4 +973,3 @@ L(lo3): mul v0 FUNC_EXIT() ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/core2/mullo_basecase.asm b/mpn/x86_64/core2/mullo_basecase.asm index efed03d..0f03d86 100644 --- a/mpn/x86_64/core2/mullo_basecase.asm +++ b/mpn/x86_64/core2/mullo_basecase.asm @@ -425,4 +425,3 @@ L(n3): mov (vp_param), %r9 FUNC_EXIT() ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/core2/redc_1.asm b/mpn/x86_64/core2/redc_1.asm index d98f56f..8c296fd 100644 --- a/mpn/x86_64/core2/redc_1.asm +++ b/mpn/x86_64/core2/redc_1.asm @@ -428,4 +428,3 @@ L(n4): mov -32(mp), %rax jmp L(add_n) EPILOGUE() ASM_END() -CF_PROT diff --git a/mpn/x86_64/core2/rsh1aors_n.asm b/mpn/x86_64/core2/rsh1aors_n.asm index 579fec6..27eed37 100644 --- a/mpn/x86_64/core2/rsh1aors_n.asm +++ b/mpn/x86_64/core2/rsh1aors_n.asm @@ -167,4 +167,3 @@ L(end): shrd $1, %rbx, %rbp FUNC_EXIT() ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/core2/rshift.asm b/mpn/x86_64/core2/rshift.asm index 97f4429..ab32ec8 100644 --- a/mpn/x86_64/core2/rshift.asm +++ b/mpn/x86_64/core2/rshift.asm @@ -145,4 +145,3 @@ L(end): shrd R8(cnt), %r8, %r11 FUNC_EXIT() ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/core2/sqr_basecase.asm b/mpn/x86_64/core2/sqr_basecase.asm index 0ee6ca3..a112c1b 100644 --- a/mpn/x86_64/core2/sqr_basecase.asm +++ b/mpn/x86_64/core2/sqr_basecase.asm @@ -982,4 +982,3 @@ L(n3): mov %rax, %r10 FUNC_EXIT() ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/core2/sublshC_n.asm b/mpn/x86_64/core2/sublshC_n.asm index 7a48dfb..5acc46b 100644 
--- a/mpn/x86_64/core2/sublshC_n.asm +++ b/mpn/x86_64/core2/sublshC_n.asm @@ -156,4 +156,3 @@ L(end): shr $RSH, %r11 FUNC_EXIT() ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/coreibwl/addmul_1.asm b/mpn/x86_64/coreibwl/addmul_1.asm index 4fb79f9..aaa58e7 100644 --- a/mpn/x86_64/coreibwl/addmul_1.asm +++ b/mpn/x86_64/coreibwl/addmul_1.asm @@ -107,39 +107,33 @@ L(tab): JMPENT( L(f0), L(tab)) JMPENT( L(f7), L(tab)) TEXT -L(f0): CFPROT_ENDBR - mulx( (up), %r10, %r8) +L(f0): mulx( (up), %r10, %r8) lea -8(up), up lea -8(rp), rp lea -1(n), n jmp L(b0) -L(f3): CFPROT_ENDBR - mulx( (up), %r9, %rax) +L(f3): mulx( (up), %r9, %rax) lea 16(up), up lea -48(rp), rp jmp L(b3) -L(f4): CFPROT_ENDBR - mulx( (up), %r10, %r8) +L(f4): mulx( (up), %r10, %r8) lea 24(up), up lea -40(rp), rp jmp L(b4) -L(f5): CFPROT_ENDBR - mulx( (up), %r9, %rax) +L(f5): mulx( (up), %r9, %rax) lea 32(up), up lea -32(rp), rp jmp L(b5) -L(f6): CFPROT_ENDBR - mulx( (up), %r10, %r8) +L(f6): mulx( (up), %r10, %r8) lea 40(up), up lea -24(rp), rp jmp L(b6) -L(f1): CFPROT_ENDBR - mulx( (up), %r9, %rax) +L(f1): mulx( (up), %r9, %rax) jrcxz L(1) jmp L(b1) L(1): add (rp), %r9 @@ -157,8 +151,7 @@ ifdef(`PIC', ` nop;nop;nop;nop', ` nop;nop;nop;nop;nop;nop;nop;nop;nop;nop;nop') -L(f2): CFPROT_ENDBR - mulx( (up), %r10, %r8) +L(f2): mulx( (up), %r10, %r8) lea 8(up), up lea 8(rp), rp mulx( (up), %r9, %rax) @@ -202,11 +195,9 @@ L(b3): adox( 48,(rp), %r9) mulx( (up), %r9, %rax) jmp L(top) -L(f7): CFPROT_ENDBR - mulx( (up), %r9, %rax) +L(f7): mulx( (up), %r9, %rax) lea -16(up), up lea -16(rp), rp jmp L(b7) EPILOGUE() ASM_END() -CF_PROT diff --git a/mpn/x86_64/coreibwl/mul_1.asm b/mpn/x86_64/coreibwl/mul_1.asm index 4fe4822..a271e6c 100644 --- a/mpn/x86_64/coreibwl/mul_1.asm +++ b/mpn/x86_64/coreibwl/mul_1.asm @@ -106,56 +106,48 @@ L(tab): JMPENT( L(f0), L(tab)) JMPENT( L(f7), L(tab)) TEXT -L(f0): CFPROT_ENDBR - mulx( (up), %r10, %r8) +L(f0): mulx( (up), %r10, %r8) lea 56(up), up lea -8(rp), rp jmp L(b0) -L(f3): CFPROT_ENDBR - mulx( (up), %r9, %rax) +L(f3): mulx( (up), %r9, %rax) lea 16(up), up lea 16(rp), rp inc n jmp L(b3) -L(f4): CFPROT_ENDBR - mulx( (up), %r10, %r8) +L(f4): mulx( (up), %r10, %r8) lea 24(up), up lea 24(rp), rp inc n jmp L(b4) -L(f5): CFPROT_ENDBR - mulx( (up), %r9, %rax) +L(f5): mulx( (up), %r9, %rax) lea 32(up), up lea 32(rp), rp inc n jmp L(b5) -L(f6): CFPROT_ENDBR - mulx( (up), %r10, %r8) +L(f6): mulx( (up), %r10, %r8) lea 40(up), up lea 40(rp), rp inc n jmp L(b6) -L(f7): CFPROT_ENDBR - mulx( (up), %r9, %rax) +L(f7): mulx( (up), %r9, %rax) lea 48(up), up lea 48(rp), rp inc n jmp L(b7) -L(f1): CFPROT_ENDBR - mulx( (up), %r9, %rax) +L(f1): mulx( (up), %r9, %rax) test n, n jnz L(b1) L(1): mov %r9, (rp) ret -L(f2): CFPROT_ENDBR - mulx( (up), %r10, %r8) +L(f2): mulx( (up), %r10, %r8) lea 8(up), up lea 8(rp), rp mulx( (up), %r9, %rax) @@ -199,4 +191,3 @@ L(end): mov %r10, -8(rp) ret EPILOGUE() ASM_END() -CF_PROT diff --git a/mpn/x86_64/coreibwl/mul_basecase.asm b/mpn/x86_64/coreibwl/mul_basecase.asm index 74cd67c..50f3ce5 100644 --- a/mpn/x86_64/coreibwl/mul_basecase.asm +++ b/mpn/x86_64/coreibwl/mul_basecase.asm @@ -155,53 +155,45 @@ ifdef(`PIC', jmp *(%r10,%rax,8) ') -L(mf0): CFPROT_ENDBR - mulx( (up), w2, w3) +L(mf0): mulx( (up), w2, w3) lea 56(up), up lea -8(rp), rp jmp L(mb0) -L(mf3): CFPROT_ENDBR - mulx( (up), w0, w1) +L(mf3): mulx( (up), w0, w1) lea 16(up), up lea 16(rp), rp inc n jmp L(mb3) -L(mf4): CFPROT_ENDBR - mulx( (up), w2, w3) +L(mf4): mulx( (up), w2, w3) lea 24(up), up lea 24(rp), rp inc n jmp L(mb4) -L(mf5): 
CFPROT_ENDBR - mulx( (up), w0, w1) +L(mf5): mulx( (up), w0, w1) lea 32(up), up lea 32(rp), rp inc n jmp L(mb5) -L(mf6): CFPROT_ENDBR - mulx( (up), w2, w3) +L(mf6): mulx( (up), w2, w3) lea 40(up), up lea 40(rp), rp inc n jmp L(mb6) -L(mf7): CFPROT_ENDBR - mulx( (up), w0, w1) +L(mf7): mulx( (up), w0, w1) lea 48(up), up lea 48(rp), rp inc n jmp L(mb7) -L(mf1): CFPROT_ENDBR - mulx( (up), w0, w1) +L(mf1): mulx( (up), w0, w1) jmp L(mb1) -L(mf2): CFPROT_ENDBR - mulx( (up), w2, w3) +L(mf2): mulx( (up), w2, w3) lea 8(up), up lea 8(rp), rp mulx( (up), w0, w1) @@ -262,39 +254,32 @@ L(outer): lea 8(vp), vp jmp *jaddr -L(f0): CFPROT_ENDBR - mulx( 8,(up), w2, w3) +L(f0): mulx( 8,(up), w2, w3) lea 8(rp,unneg,8), rp lea -1(n), n jmp L(b0) -L(f3): CFPROT_ENDBR - mulx( -16,(up), w0, w1) +L(f3): mulx( -16,(up), w0, w1) lea -56(rp,unneg,8), rp jmp L(b3) -L(f4): CFPROT_ENDBR - mulx( -24,(up), w2, w3) +L(f4): mulx( -24,(up), w2, w3) lea -56(rp,unneg,8), rp jmp L(b4) -L(f5): CFPROT_ENDBR - mulx( -32,(up), w0, w1) +L(f5): mulx( -32,(up), w0, w1) lea -56(rp,unneg,8), rp jmp L(b5) -L(f6): CFPROT_ENDBR - mulx( -40,(up), w2, w3) +L(f6): mulx( -40,(up), w2, w3) lea -56(rp,unneg,8), rp jmp L(b6) -L(f7): CFPROT_ENDBR - mulx( 16,(up), w0, w1) +L(f7): mulx( 16,(up), w0, w1) lea 8(rp,unneg,8), rp jmp L(b7) -L(f1): CFPROT_ENDBR - mulx( (up), w0, w1) +L(f1): mulx( (up), w0, w1) lea 8(rp,unneg,8), rp jmp L(b1) @@ -315,7 +300,7 @@ L(done): FUNC_EXIT() ret -L(f2): CFPROT_ENDBR +L(f2): mulx( -8,(up), w2, w3) lea 8(rp,unneg,8), rp mulx( (up), w0, w1) @@ -380,4 +365,3 @@ L(atab):JMPENT( L(f0), L(atab)) JMPENT( L(f7), L(atab)) TEXT EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/coreibwl/sqr_basecase.asm b/mpn/x86_64/coreibwl/sqr_basecase.asm index 64e8298..447ba00 100644 --- a/mpn/x86_64/coreibwl/sqr_basecase.asm +++ b/mpn/x86_64/coreibwl/sqr_basecase.asm @@ -184,50 +184,42 @@ ifdef(`PIC', jmp *(%r10,%rax,8) ') -L(mf0): CFPROT_ENDBR - mulx( 8,(up), w2, w3) +L(mf0): mulx( 8,(up), w2, w3) lea 64(up), up C lea (rp), rp jmp L(mb0) -L(mf3): CFPROT_ENDBR - mulx( 8,(up), w0, w1) +L(mf3): mulx( 8,(up), w0, w1) lea 24(up), up lea 24(rp), rp jmp L(mb3) -L(mf4): CFPROT_ENDBR - mulx( 8,(up), w2, w3) +L(mf4): mulx( 8,(up), w2, w3) lea 32(up), up lea 32(rp), rp jmp L(mb4) -L(mf5): CFPROT_ENDBR - mulx( 8,(up), w0, w1) +L(mf5): mulx( 8,(up), w0, w1) lea 40(up), up lea 40(rp), rp jmp L(mb5) -L(mf6): CFPROT_ENDBR - mulx( 8,(up), w2, w3) +L(mf6): mulx( 8,(up), w2, w3) lea 48(up), up lea 48(rp), rp jmp L(mb6) -L(mf7): CFPROT_ENDBR - mulx( 8,(up), w0, w1) +L(mf7): mulx( 8,(up), w0, w1) lea 56(up), up lea 56(rp), rp jmp L(mb7) -L(mf1): CFPROT_ENDBR - mulx( 8,(up), w0, w1) +L(mf1): mulx( 8,(up), w0, w1) lea 8(up), up lea 8(rp), rp jmp L(mb1) -L(mf2): CFPROT_ENDBR - mulx( 8,(up), w2, w3) +L(mf2): mulx( 8,(up), w2, w3) lea 16(up), up lea 16(rp), rp dec R32(n) @@ -283,8 +275,7 @@ L(ed0): adox( (rp), w0) mov w0, (rp) adc %rcx, w1 C relies on rcx = 0 mov w1, 8(rp) -L(f7): CFPROT_ENDBR - lea -64(up,un_save,8), up +L(f7): lea -64(up,un_save,8), up or R32(un_save), R32(n) mov 8(up), u0 mulx( 16,(up), w0, w1) @@ -335,8 +326,7 @@ L(ed1): adox( (rp), w0) mov w0, (rp) adc %rcx, w1 C relies on rcx = 0 mov w1, 8(rp) -L(f0): CFPROT_ENDBR - lea -64(up,un_save,8), up +L(f0): lea -64(up,un_save,8), up or R32(un_save), R32(n) mov (up), u0 mulx( 8,(up), w2, w3) @@ -387,8 +377,7 @@ L(ed2): adox( (rp), w0) mov w0, (rp) adc %rcx, w1 C relies on rcx = 0 mov w1, 8(rp) -L(f1): CFPROT_ENDBR - lea (up,un_save,8), up +L(f1): lea (up,un_save,8), up or R32(un_save), R32(n) lea 
8(un_save), un_save mov -8(up), u0 @@ -440,8 +429,7 @@ L(ed3): adox( (rp), w0) mov w0, (rp) adc %rcx, w1 C relies on rcx = 0 mov w1, 8(rp) -L(f2): CFPROT_ENDBR - lea (up,un_save,8), up +L(f2): lea (up,un_save,8), up or R32(un_save), R32(n) jz L(corner2) mov -16(up), u0 @@ -494,8 +482,7 @@ L(ed4): adox( (rp), w0) mov w0, (rp) adc %rcx, w1 C relies on rcx = 0 mov w1, 8(rp) -L(f3): CFPROT_ENDBR - lea (up,un_save,8), up +L(f3): lea (up,un_save,8), up or R32(un_save), R32(n) jz L(corner3) mov -24(up), u0 @@ -547,8 +534,7 @@ L(ed5): adox( (rp), w0) mov w0, (rp) adc %rcx, w1 C relies on rcx = 0 mov w1, 8(rp) -L(f4): CFPROT_ENDBR - lea (up,un_save,8), up +L(f4): lea (up,un_save,8), up or R32(un_save), R32(n) mov -32(up), u0 mulx( -24,(up), w2, w3) @@ -599,8 +585,7 @@ L(ed6): adox( (rp), w0) mov w0, (rp) adc %rcx, w1 C relies on rcx = 0 mov w1, 8(rp) -L(f5): CFPROT_ENDBR - lea (up,un_save,8), up +L(f5): lea (up,un_save,8), up or R32(un_save), R32(n) mov -40(up), u0 mulx( -32,(up), w0, w1) @@ -651,8 +636,7 @@ L(ed7): adox( (rp), w0) mov w0, (rp) adc %rcx, w1 C relies on rcx = 0 mov w1, 8(rp) -L(f6): CFPROT_ENDBR - lea (up,un_save,8), up +L(f6): lea (up,un_save,8), up or R32(un_save), R32(n) mov -48(up), u0 mulx( -40,(up), w2, w3) @@ -854,4 +838,3 @@ L(atab):JMPENT( L(f6), L(atab)) JMPENT( L(f5), L(atab)) TEXT EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/coreihwl/addmul_2.asm b/mpn/x86_64/coreihwl/addmul_2.asm index 2a5f996..54aebc8 100644 --- a/mpn/x86_64/coreihwl/addmul_2.asm +++ b/mpn/x86_64/coreihwl/addmul_2.asm @@ -236,4 +236,3 @@ L(end): mulx( v0, %rax, w3) FUNC_EXIT() ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/coreihwl/aorsmul_1.asm b/mpn/x86_64/coreihwl/aorsmul_1.asm index 8c03b17..fd5a26d 100644 --- a/mpn/x86_64/coreihwl/aorsmul_1.asm +++ b/mpn/x86_64/coreihwl/aorsmul_1.asm @@ -196,4 +196,3 @@ L(ret): pop %r13 FUNC_EXIT() ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/coreihwl/mul_1.asm b/mpn/x86_64/coreihwl/mul_1.asm index b6463f9..1e3c338 100644 --- a/mpn/x86_64/coreihwl/mul_1.asm +++ b/mpn/x86_64/coreihwl/mul_1.asm @@ -153,4 +153,3 @@ L(cj1): mov %rbx, 24(rp) ret EPILOGUE() ASM_END() -CF_PROT diff --git a/mpn/x86_64/coreihwl/mul_2.asm b/mpn/x86_64/coreihwl/mul_2.asm index 21defe9..5bdb1aa 100644 --- a/mpn/x86_64/coreihwl/mul_2.asm +++ b/mpn/x86_64/coreihwl/mul_2.asm @@ -171,4 +171,3 @@ L(end): mulx( v1, %rdx, %rax) FUNC_EXIT() ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/coreihwl/mul_basecase.asm b/mpn/x86_64/coreihwl/mul_basecase.asm index e4a8381..b2656c8 100644 --- a/mpn/x86_64/coreihwl/mul_basecase.asm +++ b/mpn/x86_64/coreihwl/mul_basecase.asm @@ -439,4 +439,3 @@ L(ret2):pop %rbp FUNC_EXIT() ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/coreihwl/mullo_basecase.asm b/mpn/x86_64/coreihwl/mullo_basecase.asm index 6756802..9986e8b 100644 --- a/mpn/x86_64/coreihwl/mullo_basecase.asm +++ b/mpn/x86_64/coreihwl/mullo_basecase.asm @@ -424,4 +424,3 @@ L(n3): mov (vp), %r9 FUNC_EXIT() ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/coreihwl/redc_1.asm b/mpn/x86_64/coreihwl/redc_1.asm index b8b4a9e..b1d6c0a 100644 --- a/mpn/x86_64/coreihwl/redc_1.asm +++ b/mpn/x86_64/coreihwl/redc_1.asm @@ -435,4 +435,3 @@ L(ret): pop %r15 FUNC_EXIT() ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/coreihwl/sqr_basecase.asm b/mpn/x86_64/coreihwl/sqr_basecase.asm index 8e83470..641cdf3 100644 --- a/mpn/x86_64/coreihwl/sqr_basecase.asm +++ b/mpn/x86_64/coreihwl/sqr_basecase.asm @@ -504,4 +504,3 @@ L(dend):adc %rbx, %rdx FUNC_EXIT() ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/coreinhm/aorrlsh_n.asm 
b/mpn/x86_64/coreinhm/aorrlsh_n.asm index b1a4610..eed64e7 100644 --- a/mpn/x86_64/coreinhm/aorrlsh_n.asm +++ b/mpn/x86_64/coreinhm/aorrlsh_n.asm @@ -198,4 +198,3 @@ IFDOS(` mov 64(%rsp), %r9 ') C cy sbb R32(%rbx), R32(%rbx) C initialise CF save register jmp L(ent) EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/coreinhm/aorsmul_1.asm b/mpn/x86_64/coreinhm/aorsmul_1.asm index e2d96a8..b768905 100644 --- a/mpn/x86_64/coreinhm/aorsmul_1.asm +++ b/mpn/x86_64/coreinhm/aorsmul_1.asm @@ -185,4 +185,3 @@ L(end): mul v0 ret EPILOGUE() ASM_END() -CF_PROT diff --git a/mpn/x86_64/coreinhm/redc_1.asm b/mpn/x86_64/coreinhm/redc_1.asm index 782da6b..fc71c1b 100644 --- a/mpn/x86_64/coreinhm/redc_1.asm +++ b/mpn/x86_64/coreinhm/redc_1.asm @@ -547,4 +547,3 @@ L(n3): mov -24(mp), %rax jmp L(ret) EPILOGUE() ASM_END() -CF_PROT diff --git a/mpn/x86_64/coreisbr/addmul_2.asm b/mpn/x86_64/coreisbr/addmul_2.asm index e6ffe3e..21f0bf4 100644 --- a/mpn/x86_64/coreisbr/addmul_2.asm +++ b/mpn/x86_64/coreisbr/addmul_2.asm @@ -222,4 +222,3 @@ L(end): mul v1 FUNC_EXIT() ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/coreisbr/aorrlshC_n.asm b/mpn/x86_64/coreisbr/aorrlshC_n.asm index 75a9b8c..23ace41 100644 --- a/mpn/x86_64/coreisbr/aorrlshC_n.asm +++ b/mpn/x86_64/coreisbr/aorrlshC_n.asm @@ -171,4 +171,3 @@ L(end): shr $RSH, %rbp FUNC_EXIT() ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/coreisbr/aorrlsh_n.asm b/mpn/x86_64/coreisbr/aorrlsh_n.asm index 611dcb2..db8ee68 100644 --- a/mpn/x86_64/coreisbr/aorrlsh_n.asm +++ b/mpn/x86_64/coreisbr/aorrlsh_n.asm @@ -213,4 +213,3 @@ IFDOS(` mov 64(%rsp), %r9 ') C cy sbb R32(%rbx), R32(%rbx) C initialise CF save register jmp L(ent) EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/coreisbr/aors_n.asm b/mpn/x86_64/coreisbr/aors_n.asm index 07fef16..01abf78 100644 --- a/mpn/x86_64/coreisbr/aors_n.asm +++ b/mpn/x86_64/coreisbr/aors_n.asm @@ -196,4 +196,3 @@ PROLOGUE(func_nc) IFDOS(` mov 56(%rsp), %r8 ') jmp L(ent) EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/coreisbr/aorsmul_1.asm b/mpn/x86_64/coreisbr/aorsmul_1.asm index 41b8016..9f01d9c 100644 --- a/mpn/x86_64/coreisbr/aorsmul_1.asm +++ b/mpn/x86_64/coreisbr/aorsmul_1.asm @@ -207,4 +207,3 @@ IFDOS(``pop %rsi '') ret EPILOGUE() ASM_END() -CF_PROT diff --git a/mpn/x86_64/coreisbr/mul_1.asm b/mpn/x86_64/coreisbr/mul_1.asm index a30f00b..ded7d89 100644 --- a/mpn/x86_64/coreisbr/mul_1.asm +++ b/mpn/x86_64/coreisbr/mul_1.asm @@ -159,4 +159,3 @@ IFDOS(``pop %rdi '') IFDOS(``pop %rsi '') ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/coreisbr/mul_2.asm b/mpn/x86_64/coreisbr/mul_2.asm index 991820b..ffee78a 100644 --- a/mpn/x86_64/coreisbr/mul_2.asm +++ b/mpn/x86_64/coreisbr/mul_2.asm @@ -161,4 +161,3 @@ L(end): mul v0 FUNC_EXIT() ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/coreisbr/mul_basecase.asm b/mpn/x86_64/coreisbr/mul_basecase.asm index 063664b..35fd1cc 100644 --- a/mpn/x86_64/coreisbr/mul_basecase.asm +++ b/mpn/x86_64/coreisbr/mul_basecase.asm @@ -405,4 +405,3 @@ L(ret2):pop %rbp FUNC_EXIT() ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/coreisbr/mullo_basecase.asm b/mpn/x86_64/coreisbr/mullo_basecase.asm index 1b75c78..a41a8ac 100644 --- a/mpn/x86_64/coreisbr/mullo_basecase.asm +++ b/mpn/x86_64/coreisbr/mullo_basecase.asm @@ -382,4 +382,3 @@ L(n3): mov (vp_param), %r9 FUNC_EXIT() ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/coreisbr/popcount.asm b/mpn/x86_64/coreisbr/popcount.asm index 426d3a6..a5be33e 100644 --- a/mpn/x86_64/coreisbr/popcount.asm +++ b/mpn/x86_64/coreisbr/popcount.asm @@ -116,4 +116,3 @@ L(cj1): add %r11, %rax 
FUNC_EXIT() ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/coreisbr/redc_1.asm b/mpn/x86_64/coreisbr/redc_1.asm index 710e60e..f0dbe07 100644 --- a/mpn/x86_64/coreisbr/redc_1.asm +++ b/mpn/x86_64/coreisbr/redc_1.asm @@ -544,4 +544,3 @@ L(n3): mov -32(mp), %rax jmp L(cj) EPILOGUE() ASM_END() -CF_PROT diff --git a/mpn/x86_64/coreisbr/rsh1aors_n.asm b/mpn/x86_64/coreisbr/rsh1aors_n.asm index d390ff3..fd2eaea 100644 --- a/mpn/x86_64/coreisbr/rsh1aors_n.asm +++ b/mpn/x86_64/coreisbr/rsh1aors_n.asm @@ -191,4 +191,3 @@ L(end): shrd $1, %rbx, %rbp FUNC_EXIT() ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/coreisbr/sqr_basecase.asm b/mpn/x86_64/coreisbr/sqr_basecase.asm index 4d4e545..46a3612 100644 --- a/mpn/x86_64/coreisbr/sqr_basecase.asm +++ b/mpn/x86_64/coreisbr/sqr_basecase.asm @@ -482,4 +482,3 @@ L(dend):add %r8, %r10 FUNC_EXIT() ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/div_qr_1n_pi1.asm b/mpn/x86_64/div_qr_1n_pi1.asm index 5a4f195..cb072e9 100644 --- a/mpn/x86_64/div_qr_1n_pi1.asm +++ b/mpn/x86_64/div_qr_1n_pi1.asm @@ -245,4 +245,3 @@ L(q_incr_loop): lea 8(U1), U1 jmp L(q_incr_loop) EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/div_qr_2n_pi1.asm b/mpn/x86_64/div_qr_2n_pi1.asm index 252781c..5e59a0a 100644 --- a/mpn/x86_64/div_qr_2n_pi1.asm +++ b/mpn/x86_64/div_qr_2n_pi1.asm @@ -156,4 +156,3 @@ L(fix): C Unlikely update. u2 >= d1 sbb d1, u2 jmp L(bck) EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/div_qr_2u_pi1.asm b/mpn/x86_64/div_qr_2u_pi1.asm index b47209e..85af96f 100644 --- a/mpn/x86_64/div_qr_2u_pi1.asm +++ b/mpn/x86_64/div_qr_2u_pi1.asm @@ -198,4 +198,3 @@ L(fix_qh): C Unlikely update. u2 >= d1 sbb d1, u2 jmp L(bck_qh) EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/dive_1.asm b/mpn/x86_64/dive_1.asm index b401112..988bdab 100644 --- a/mpn/x86_64/dive_1.asm +++ b/mpn/x86_64/dive_1.asm @@ -156,4 +156,3 @@ L(one): shr R8(%rcx), %rax ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/divrem_1.asm b/mpn/x86_64/divrem_1.asm index 0417756..d4d61ad 100644 --- a/mpn/x86_64/divrem_1.asm +++ b/mpn/x86_64/divrem_1.asm @@ -312,4 +312,3 @@ L(ret): pop %rbx FUNC_EXIT() ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/divrem_2.asm b/mpn/x86_64/divrem_2.asm index 73aa740..296c9b6 100644 --- a/mpn/x86_64/divrem_2.asm +++ b/mpn/x86_64/divrem_2.asm @@ -188,4 +188,3 @@ L(fix): seta %dl sbb %r11, %rbx jmp L(bck) EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/fastavx/copyd.asm b/mpn/x86_64/fastavx/copyd.asm index 8d4f651..56d472f 100644 --- a/mpn/x86_64/fastavx/copyd.asm +++ b/mpn/x86_64/fastavx/copyd.asm @@ -170,4 +170,3 @@ L(bc): test $4, R8(n) FUNC_EXIT() ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/fastavx/copyi.asm b/mpn/x86_64/fastavx/copyi.asm index 3364aa9..7607747 100644 --- a/mpn/x86_64/fastavx/copyi.asm +++ b/mpn/x86_64/fastavx/copyi.asm @@ -167,4 +167,3 @@ L(bc): test $4, R8(n) FUNC_EXIT() ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/fastsse/com-palignr.asm b/mpn/x86_64/fastsse/com-palignr.asm index 191e5d9..c7155d1 100644 --- a/mpn/x86_64/fastsse/com-palignr.asm +++ b/mpn/x86_64/fastsse/com-palignr.asm @@ -308,4 +308,3 @@ L(end): test $1, R8(n) 1: FUNC_EXIT() ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/fastsse/com.asm b/mpn/x86_64/fastsse/com.asm index 5dfc8e4..307fb75 100644 --- a/mpn/x86_64/fastsse/com.asm +++ b/mpn/x86_64/fastsse/com.asm @@ -165,4 +165,3 @@ L(sma): add $14, n L(don): FUNC_EXIT() ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/fastsse/copyd-palignr.asm b/mpn/x86_64/fastsse/copyd-palignr.asm index a69812c..fac6f8a 100644 --- a/mpn/x86_64/fastsse/copyd-palignr.asm +++ 
b/mpn/x86_64/fastsse/copyd-palignr.asm @@ -252,4 +252,3 @@ L(end): test $1, R8(n) 1: FUNC_EXIT() ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/fastsse/copyd.asm b/mpn/x86_64/fastsse/copyd.asm index f03affa..5b8b8bf 100644 --- a/mpn/x86_64/fastsse/copyd.asm +++ b/mpn/x86_64/fastsse/copyd.asm @@ -156,4 +156,3 @@ L(sma): test $8, R8(n) L(don): FUNC_EXIT() ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/fastsse/copyi-palignr.asm b/mpn/x86_64/fastsse/copyi-palignr.asm index e50f604..22f13f1 100644 --- a/mpn/x86_64/fastsse/copyi-palignr.asm +++ b/mpn/x86_64/fastsse/copyi-palignr.asm @@ -296,4 +296,3 @@ L(end): test $1, R8(n) 1: FUNC_EXIT() ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/fastsse/copyi.asm b/mpn/x86_64/fastsse/copyi.asm index a506942..b2f3b9d 100644 --- a/mpn/x86_64/fastsse/copyi.asm +++ b/mpn/x86_64/fastsse/copyi.asm @@ -175,4 +175,3 @@ dnl jnc 1b L(ret): FUNC_EXIT() ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/fastsse/lshift-movdqu2.asm b/mpn/x86_64/fastsse/lshift-movdqu2.asm index df8ee6d..a05e850 100644 --- a/mpn/x86_64/fastsse/lshift-movdqu2.asm +++ b/mpn/x86_64/fastsse/lshift-movdqu2.asm @@ -180,4 +180,3 @@ L(end8):movq (ap), %xmm0 FUNC_EXIT() ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/fastsse/lshift.asm b/mpn/x86_64/fastsse/lshift.asm index 7d0f0fc..f76972a 100644 --- a/mpn/x86_64/fastsse/lshift.asm +++ b/mpn/x86_64/fastsse/lshift.asm @@ -167,4 +167,3 @@ L(end8):movq (ap), %xmm0 movq %xmm0, (rp) ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/fastsse/lshiftc-movdqu2.asm b/mpn/x86_64/fastsse/lshiftc-movdqu2.asm index 4878dad..8250910 100644 --- a/mpn/x86_64/fastsse/lshiftc-movdqu2.asm +++ b/mpn/x86_64/fastsse/lshiftc-movdqu2.asm @@ -191,4 +191,3 @@ L(end8):movq (ap), %xmm0 FUNC_EXIT() ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/fastsse/lshiftc.asm b/mpn/x86_64/fastsse/lshiftc.asm index f042ec0..d252069 100644 --- a/mpn/x86_64/fastsse/lshiftc.asm +++ b/mpn/x86_64/fastsse/lshiftc.asm @@ -177,4 +177,3 @@ L(end8):movq (ap), %xmm0 movq %xmm0, (rp) ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/fastsse/rshift-movdqu2.asm b/mpn/x86_64/fastsse/rshift-movdqu2.asm index 8149717..1e270b1 100644 --- a/mpn/x86_64/fastsse/rshift-movdqu2.asm +++ b/mpn/x86_64/fastsse/rshift-movdqu2.asm @@ -199,4 +199,3 @@ L(bc): dec R32(n) FUNC_EXIT() ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/fastsse/sec_tabselect.asm b/mpn/x86_64/fastsse/sec_tabselect.asm index 9975eca..e3df110 100644 --- a/mpn/x86_64/fastsse/sec_tabselect.asm +++ b/mpn/x86_64/fastsse/sec_tabselect.asm @@ -190,4 +190,3 @@ L(tp1): movdqa %xmm8, %xmm0 L(b000):FUNC_EXIT() ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/fat/fat_entry.asm b/mpn/x86_64/fat/fat_entry.asm index 5f78553..8f7599d 100644 --- a/mpn/x86_64/fat/fat_entry.asm +++ b/mpn/x86_64/fat/fat_entry.asm @@ -205,4 +205,3 @@ PROLOGUE(__gmpn_cpuid) FUNC_EXIT() ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/gcd_1.asm b/mpn/x86_64/gcd_1.asm index bf32cc0..ac4aced 100644 --- a/mpn/x86_64/gcd_1.asm +++ b/mpn/x86_64/gcd_1.asm @@ -163,4 +163,3 @@ L(shift_alot): mov %rax, %rcx jmp L(mid) EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/invert_limb.asm b/mpn/x86_64/invert_limb.asm index 829861f..cc79b89 100644 --- a/mpn/x86_64/invert_limb.asm +++ b/mpn/x86_64/invert_limb.asm @@ -113,4 +113,3 @@ ifdef(`DARWIN',` ret EPILOGUE() ASM_END() -CF_PROT diff --git a/mpn/x86_64/invert_limb_table.asm b/mpn/x86_64/invert_limb_table.asm index 16fe314..739d59e 100644 --- a/mpn/x86_64/invert_limb_table.asm +++ b/mpn/x86_64/invert_limb_table.asm @@ -48,4 +48,3 @@ forloop(i,256,512-1,dnl ` 
.value eval(0x7fd00/i) ')dnl ASM_END() -CF_PROT diff --git a/mpn/x86_64/k10/hamdist.asm b/mpn/x86_64/k10/hamdist.asm index 83e4e86..44b67b5 100644 --- a/mpn/x86_64/k10/hamdist.asm +++ b/mpn/x86_64/k10/hamdist.asm @@ -101,4 +101,3 @@ L(top): mov (ap,n,8), %r8 FUNC_EXIT() ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/k10/popcount.asm b/mpn/x86_64/k10/popcount.asm index 17e7a73..3814aea 100644 --- a/mpn/x86_64/k10/popcount.asm +++ b/mpn/x86_64/k10/popcount.asm @@ -136,4 +136,3 @@ C 1 = n mod 8 FUNC_EXIT() ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/k8/aorrlsh_n.asm b/mpn/x86_64/k8/aorrlsh_n.asm index 8eff29e..ff3a184 100644 --- a/mpn/x86_64/k8/aorrlsh_n.asm +++ b/mpn/x86_64/k8/aorrlsh_n.asm @@ -215,4 +215,3 @@ L(cj1): mov %r9, 8(rp,n,8) FUNC_EXIT() ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/k8/div_qr_1n_pi1.asm b/mpn/x86_64/k8/div_qr_1n_pi1.asm index fef3a09..861402b 100644 --- a/mpn/x86_64/k8/div_qr_1n_pi1.asm +++ b/mpn/x86_64/k8/div_qr_1n_pi1.asm @@ -247,4 +247,3 @@ L(q_incr_loop): lea 8(U1), U1 jmp L(q_incr_loop) EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/k8/mul_basecase.asm b/mpn/x86_64/k8/mul_basecase.asm index 61b6e0e..ca2efb9 100644 --- a/mpn/x86_64/k8/mul_basecase.asm +++ b/mpn/x86_64/k8/mul_basecase.asm @@ -467,4 +467,3 @@ L(ret): pop %r15 ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/k8/mullo_basecase.asm b/mpn/x86_64/k8/mullo_basecase.asm index b1f5b20..fa00f42 100644 --- a/mpn/x86_64/k8/mullo_basecase.asm +++ b/mpn/x86_64/k8/mullo_basecase.asm @@ -99,14 +99,12 @@ dnl JMPENT( L(2m4), L(tab)) C 10 dnl JMPENT( L(3m4), L(tab)) C 11 TEXT -L(1): CFPROT_ENDBR - imul %r8, %rax +L(1): imul %r8, %rax mov %rax, (rp) FUNC_EXIT() ret -L(2): CFPROT_ENDBR - mov 8(vp_param), %r11 +L(2): mov 8(vp_param), %r11 imul %rax, %r11 C u0 x v1 mul %r8 C u0 x v0 mov %rax, (rp) @@ -117,8 +115,7 @@ L(2): CFPROT_ENDBR FUNC_EXIT() ret -L(3): CFPROT_ENDBR - mov 8(vp_param), %r9 C v1 +L(3): mov 8(vp_param), %r9 C v1 mov 16(vp_param), %r11 mul %r8 C u0 x v0 -> mov %rax, (rp) C r0 @@ -147,8 +144,7 @@ L(0m4): L(1m4): L(2m4): L(3m4): -L(gen): CFPROT_ENDBR - push %rbx +L(gen): push %rbx push %rbp push %r13 push %r14 @@ -438,4 +434,3 @@ L(ret): pop %r15 FUNC_EXIT() ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/k8/mulmid_basecase.asm b/mpn/x86_64/k8/mulmid_basecase.asm index 0ace1ba..86f1414 100644 --- a/mpn/x86_64/k8/mulmid_basecase.asm +++ b/mpn/x86_64/k8/mulmid_basecase.asm @@ -557,4 +557,3 @@ L(ret): pop %r15 FUNC_EXIT() ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/k8/redc_1.asm b/mpn/x86_64/k8/redc_1.asm index b00103f..9327b21 100644 --- a/mpn/x86_64/k8/redc_1.asm +++ b/mpn/x86_64/k8/redc_1.asm @@ -124,9 +124,8 @@ L(tab): JMPENT( L(0), L(tab)) JMPENT( L(3m4), L(tab)) TEXT -L(1): CFPROT_ENDBR ALIGN(16) - mov (mp_param), %rax +L(1): mov (mp_param), %rax mul q0 add 8(up), %rax adc 16(up), %rdx @@ -136,9 +135,8 @@ L(1): CFPROT_ENDBR jmp L(ret) -L(2): CFPROT_ENDBR ALIGN(16) - mov (mp_param), %rax +L(2): mov (mp_param), %rax mul q0 xor R32(%r14), R32(%r14) mov %rax, %r10 @@ -173,8 +171,7 @@ L(2): CFPROT_ENDBR jmp L(ret) -L(3): CFPROT_ENDBR - mov (mp_param), %rax +L(3): mov (mp_param), %rax mul q0 mov %rax, %rbx mov %rdx, %r10 @@ -250,8 +247,8 @@ L(3): CFPROT_ENDBR jmp L(ret) -L(2m4): CFPROT_ENDBR ALIGN(16) +L(2m4): L(lo2): mov (mp,nneg,8), %rax mul q0 xor R32(%r14), R32(%r14) @@ -326,8 +323,8 @@ L(le2): add %r10, (up) jmp L(addx) -L(1m4): CFPROT_ENDBR ALIGN(16) +L(1m4): L(lo1): mov (mp,nneg,8), %rax xor %r9, %r9 xor R32(%rbx), R32(%rbx) @@ -399,9 +396,9 @@ L(le1): add %r10, (up) jmp L(addx) -L(0): 
-L(0m4): CFPROT_ENDBR ALIGN(16) +L(0): +L(0m4): L(lo0): mov (mp,nneg,8), %rax mov nneg, i mul q0 @@ -465,8 +462,8 @@ L(le0): add %r10, (up) jmp L(addy) -L(3m4): CFPROT_ENDBR ALIGN(16) +L(3m4): L(lo3): mov (mp,nneg,8), %rax mul q0 mov %rax, %rbx @@ -592,4 +589,3 @@ L(ret): pop %r15 FUNC_EXIT() ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/k8/sqr_basecase.asm b/mpn/x86_64/k8/sqr_basecase.asm index e6a545d..60cf945 100644 --- a/mpn/x86_64/k8/sqr_basecase.asm +++ b/mpn/x86_64/k8/sqr_basecase.asm @@ -131,8 +131,7 @@ L(tab): JMPENT( L(4), L(tab)) JMPENT( L(3m4), L(tab)) TEXT -L(1): CFPROT_ENDBR - mov (up), %rax +L(1): mov (up), %rax mul %rax add $40, %rsp mov %rax, (rp) @@ -140,8 +139,7 @@ L(1): CFPROT_ENDBR FUNC_EXIT() ret -L(2): CFPROT_ENDBR - mov (up), %rax +L(2): mov (up), %rax mov %rax, %r8 mul %rax mov 8(up), %r11 @@ -167,8 +165,7 @@ L(2): CFPROT_ENDBR FUNC_EXIT() ret -L(3): CFPROT_ENDBR - mov (up), %rax +L(3): mov (up), %rax mov %rax, %r10 mul %rax mov 8(up), %r11 @@ -213,8 +210,7 @@ L(3): CFPROT_ENDBR FUNC_EXIT() ret -L(4): CFPROT_ENDBR - mov (up), %rax +L(4): mov (up), %rax mov %rax, %r11 mul %rax mov 8(up), %rbx @@ -285,7 +281,7 @@ L(4): CFPROT_ENDBR ret -L(0m4): CFPROT_ENDBR +L(0m4): lea -16(rp,n,8), tp C point tp in middle of result operand mov (up), v0 mov 8(up), %rax @@ -343,7 +339,7 @@ L(L3): xor R32(w1), R32(w1) jmp L(dowhile) -L(1m4): CFPROT_ENDBR +L(1m4): lea 8(rp,n,8), tp C point tp in middle of result operand mov (up), v0 C u0 mov 8(up), %rax C u1 @@ -421,7 +417,7 @@ L(m2x): mov (up,j,8), %rax jmp L(dowhile_end) -L(2m4): CFPROT_ENDBR +L(2m4): lea -16(rp,n,8), tp C point tp in middle of result operand mov (up), v0 mov 8(up), %rax @@ -478,7 +474,7 @@ L(L1): xor R32(w0), R32(w0) jmp L(dowhile_mid) -L(3m4): CFPROT_ENDBR +L(3m4): lea 8(rp,n,8), tp C point tp in middle of result operand mov (up), v0 C u0 mov 8(up), %rax C u1 @@ -809,4 +805,3 @@ L(d1): mov %r11, 24(rp,j,8) FUNC_EXIT() ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/logops_n.asm b/mpn/x86_64/logops_n.asm index b2c640c..b277f58 100644 --- a/mpn/x86_64/logops_n.asm +++ b/mpn/x86_64/logops_n.asm @@ -134,7 +134,6 @@ L(e10): movq 24(vp,n,8), %r9 L(ret): FUNC_EXIT() ret EPILOGUE() -CF_PROT ') ifdef(`VARIANT_2',` @@ -188,7 +187,6 @@ L(e10): movq 24(vp,n,8), %r9 L(ret): FUNC_EXIT() ret EPILOGUE() -CF_PROT ') ifdef(`VARIANT_3',` @@ -243,5 +241,4 @@ L(e10): movq 24(vp,n,8), %r9 L(ret): FUNC_EXIT() ret EPILOGUE() -CF_PROT ') diff --git a/mpn/x86_64/lshift.asm b/mpn/x86_64/lshift.asm index 990b3b8..f368944 100644 --- a/mpn/x86_64/lshift.asm +++ b/mpn/x86_64/lshift.asm @@ -245,4 +245,3 @@ L(ast): mov (up), %r10 FUNC_EXIT() ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/lshiftc.asm b/mpn/x86_64/lshiftc.asm index 4fd4430..c4ba04a 100644 --- a/mpn/x86_64/lshiftc.asm +++ b/mpn/x86_64/lshiftc.asm @@ -180,4 +180,3 @@ L(ast): mov (up), %r10 FUNC_EXIT() ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/lshsub_n.asm b/mpn/x86_64/lshsub_n.asm index d263565..4d428c0 100644 --- a/mpn/x86_64/lshsub_n.asm +++ b/mpn/x86_64/lshsub_n.asm @@ -170,4 +170,3 @@ L(end): FUNC_EXIT() ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/missing.asm b/mpn/x86_64/missing.asm index 7914b82..9b65c89 100644 --- a/mpn/x86_64/missing.asm +++ b/mpn/x86_64/missing.asm @@ -128,4 +128,3 @@ PROLOGUE(__gmp_adcx) ret EPILOGUE() PROTECT(__gmp_adcx) -CF_PROT diff --git a/mpn/x86_64/mod_1_1.asm b/mpn/x86_64/mod_1_1.asm index 287f61d..09b5dd1 100644 --- a/mpn/x86_64/mod_1_1.asm +++ b/mpn/x86_64/mod_1_1.asm @@ -234,4 +234,3 @@ L(z): ret EPILOGUE() ASM_END() -CF_PROT diff --git 
a/mpn/x86_64/mod_1_2.asm b/mpn/x86_64/mod_1_2.asm index 1cd6dd1..09d856e 100644 --- a/mpn/x86_64/mod_1_2.asm +++ b/mpn/x86_64/mod_1_2.asm @@ -237,4 +237,3 @@ ifdef(`SHLD_SLOW',` FUNC_EXIT() ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/mod_1_4.asm b/mpn/x86_64/mod_1_4.asm index fb685ef..ae34617 100644 --- a/mpn/x86_64/mod_1_4.asm +++ b/mpn/x86_64/mod_1_4.asm @@ -268,4 +268,3 @@ ifdef(`SHLD_SLOW',` FUNC_EXIT() ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/mod_34lsub1.asm b/mpn/x86_64/mod_34lsub1.asm index 2cf5751..62bdcfa 100644 --- a/mpn/x86_64/mod_34lsub1.asm +++ b/mpn/x86_64/mod_34lsub1.asm @@ -135,55 +135,46 @@ L(tab): JMPENT( L(0), L(tab)) JMPENT( L(8), L(tab)) TEXT -L(6): CFPROT_ENDBR - add (ap), %rax +L(6): add (ap), %rax adc 8(ap), %rcx adc 16(ap), %rdx adc $0, %r9 add $24, ap -L(3): CFPROT_ENDBR - add (ap), %rax +L(3): add (ap), %rax adc 8(ap), %rcx adc 16(ap), %rdx jmp L(cj1) -L(7): CFPROT_ENDBR - add (ap), %rax +L(7): add (ap), %rax adc 8(ap), %rcx adc 16(ap), %rdx adc $0, %r9 add $24, ap -L(4): CFPROT_ENDBR - add (ap), %rax +L(4): add (ap), %rax adc 8(ap), %rcx adc 16(ap), %rdx adc $0, %r9 add $24, ap -L(1): CFPROT_ENDBR - add (ap), %rax +L(1): add (ap), %rax adc $0, %rcx jmp L(cj2) -L(8): CFPROT_ENDBR - add (ap), %rax +L(8): add (ap), %rax adc 8(ap), %rcx adc 16(ap), %rdx adc $0, %r9 add $24, ap -L(5): CFPROT_ENDBR - add (ap), %rax +L(5): add (ap), %rax adc 8(ap), %rcx adc 16(ap), %rdx adc $0, %r9 add $24, ap -L(2): CFPROT_ENDBR - add (ap), %rax +L(2): add (ap), %rax adc 8(ap), %rcx L(cj2): adc $0, %rdx L(cj1): adc $0, %r9 -L(0): CFPROT_ENDBR - add %r9, %rax +L(0): add %r9, %rax adc $0, %rcx adc $0, %rdx adc $0, %rax @@ -212,4 +203,3 @@ L(0): CFPROT_ENDBR FUNC_EXIT() ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/mode1o.asm b/mpn/x86_64/mode1o.asm index c10a5a6..2cd2b08 100644 --- a/mpn/x86_64/mode1o.asm +++ b/mpn/x86_64/mode1o.asm @@ -169,4 +169,3 @@ L(one): EPILOGUE(mpn_modexact_1c_odd) EPILOGUE(mpn_modexact_1_odd) -CF_PROT diff --git a/mpn/x86_64/mul_1.asm b/mpn/x86_64/mul_1.asm index 6ea9a4a..b032afc 100644 --- a/mpn/x86_64/mul_1.asm +++ b/mpn/x86_64/mul_1.asm @@ -181,4 +181,3 @@ IFDOS(``pop %rdi '') IFDOS(``pop %rsi '') ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/mul_2.asm b/mpn/x86_64/mul_2.asm index 6b73737..f408c52 100644 --- a/mpn/x86_64/mul_2.asm +++ b/mpn/x86_64/mul_2.asm @@ -190,4 +190,3 @@ L(m22): mul v1 FUNC_EXIT() ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/mulx/aorsmul_1.asm b/mpn/x86_64/mulx/aorsmul_1.asm index 942cf6a..285c073 100644 --- a/mpn/x86_64/mulx/aorsmul_1.asm +++ b/mpn/x86_64/mulx/aorsmul_1.asm @@ -159,4 +159,3 @@ L(wd1): ADCSBB %rbx, 24(rp) ret EPILOGUE() ASM_END() -CF_PROT diff --git a/mpn/x86_64/mulx/mul_1.asm b/mpn/x86_64/mulx/mul_1.asm index 4a0e6ef..34a044d 100644 --- a/mpn/x86_64/mulx/mul_1.asm +++ b/mpn/x86_64/mulx/mul_1.asm @@ -152,4 +152,3 @@ L(wd1): adc %r12, %rbx ret EPILOGUE() ASM_END() -CF_PROT diff --git a/mpn/x86_64/nano/dive_1.asm b/mpn/x86_64/nano/dive_1.asm index d57c444..e9a0763 100644 --- a/mpn/x86_64/nano/dive_1.asm +++ b/mpn/x86_64/nano/dive_1.asm @@ -164,4 +164,3 @@ L(one): shr R8(%rcx), %rax FUNC_EXIT() ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/pentium4/aors_n.asm b/mpn/x86_64/pentium4/aors_n.asm index d3daf6f..8e6ee1b 100644 --- a/mpn/x86_64/pentium4/aors_n.asm +++ b/mpn/x86_64/pentium4/aors_n.asm @@ -194,4 +194,3 @@ L(ret): mov R32(%rbx), R32(%rax) FUNC_EXIT() ret EPILOGUE() -CF_PROT diff --git a/mpn/x86_64/pentium4/aorslshC_n.asm b/mpn/x86_64/pentium4/aorslshC_n.asm index a4cd689..d03c6a3 100644 --- 
a/mpn/x86_64/pentium4/aorslshC_n.asm
+++ b/mpn/x86_64/pentium4/aorslshC_n.asm
@@ -201,4 +201,3 @@ L(c3): mov $1, R8(%rax)
 jmp L(rc3)
 EPILOGUE()
 ASM_END()
-CF_PROT
diff --git a/mpn/x86_64/pentium4/lshift.asm b/mpn/x86_64/pentium4/lshift.asm
index baa4820..d3b5213 100644
--- a/mpn/x86_64/pentium4/lshift.asm
+++ b/mpn/x86_64/pentium4/lshift.asm
@@ -164,4 +164,3 @@ L(ast): movq (up), %mm2
 FUNC_EXIT()
 ret
 EPILOGUE()
-CF_PROT
diff --git a/mpn/x86_64/pentium4/lshiftc.asm b/mpn/x86_64/pentium4/lshiftc.asm
index e7ed07f..fc64676 100644
--- a/mpn/x86_64/pentium4/lshiftc.asm
+++ b/mpn/x86_64/pentium4/lshiftc.asm
@@ -177,4 +177,3 @@ L(ast): movq (up), %mm2
 FUNC_EXIT()
 ret
 EPILOGUE()
-CF_PROT
diff --git a/mpn/x86_64/pentium4/mod_34lsub1.asm b/mpn/x86_64/pentium4/mod_34lsub1.asm
index adb4ae6..f34b3f0 100644
--- a/mpn/x86_64/pentium4/mod_34lsub1.asm
+++ b/mpn/x86_64/pentium4/mod_34lsub1.asm
@@ -165,4 +165,3 @@ L(combine):
 FUNC_EXIT()
 ret
 EPILOGUE()
-CF_PROT
diff --git a/mpn/x86_64/pentium4/rsh1aors_n.asm b/mpn/x86_64/pentium4/rsh1aors_n.asm
index 64a6322..5528ce4 100644
--- a/mpn/x86_64/pentium4/rsh1aors_n.asm
+++ b/mpn/x86_64/pentium4/rsh1aors_n.asm
@@ -332,4 +332,3 @@ L(cj1): or %r14, %rbx
 L(c3): mov $1, R8(%rax)
 jmp L(rc3)
 EPILOGUE()
-CF_PROT
diff --git a/mpn/x86_64/pentium4/rshift.asm b/mpn/x86_64/pentium4/rshift.asm
index 758ca64..b7c1ee2 100644
--- a/mpn/x86_64/pentium4/rshift.asm
+++ b/mpn/x86_64/pentium4/rshift.asm
@@ -167,4 +167,3 @@ L(ast): movq (up), %mm2
 FUNC_EXIT()
 ret
 EPILOGUE()
-CF_PROT
diff --git a/mpn/x86_64/popham.asm b/mpn/x86_64/popham.asm
index a52ea0f..9005f81 100644
--- a/mpn/x86_64/popham.asm
+++ b/mpn/x86_64/popham.asm
@@ -175,4 +175,3 @@ L(end):
 FUNC_EXIT()
 ret
 EPILOGUE()
-CF_PROT
diff --git a/mpn/x86_64/rsh1aors_n.asm b/mpn/x86_64/rsh1aors_n.asm
index d28cc32..a3e9cc5 100644
--- a/mpn/x86_64/rsh1aors_n.asm
+++ b/mpn/x86_64/rsh1aors_n.asm
@@ -187,4 +187,3 @@ L(end): mov %rbx, (rp)
 FUNC_EXIT()
 ret
 EPILOGUE()
-CF_PROT
diff --git a/mpn/x86_64/rshift.asm b/mpn/x86_64/rshift.asm
index 2c45172..3f344f1 100644
--- a/mpn/x86_64/rshift.asm
+++ b/mpn/x86_64/rshift.asm
@@ -174,4 +174,3 @@ L(ast): mov (up), %r10
 FUNC_EXIT()
 ret
 EPILOGUE()
-CF_PROT
diff --git a/mpn/x86_64/sec_tabselect.asm b/mpn/x86_64/sec_tabselect.asm
index 2198b4b..e8aed26 100644
--- a/mpn/x86_64/sec_tabselect.asm
+++ b/mpn/x86_64/sec_tabselect.asm
@@ -174,4 +174,3 @@ L(b00): pop %r15
 FUNC_EXIT()
 ret
 EPILOGUE()
-CF_PROT
diff --git a/mpn/x86_64/sqr_diag_addlsh1.asm b/mpn/x86_64/sqr_diag_addlsh1.asm
index 6db16f6..4ad034c 100644
--- a/mpn/x86_64/sqr_diag_addlsh1.asm
+++ b/mpn/x86_64/sqr_diag_addlsh1.asm
@@ -114,4 +114,3 @@ L(end): add %r10, %r8
 FUNC_EXIT()
 ret
 EPILOGUE()
-CF_PROT
diff --git a/mpn/x86_64/sublsh1_n.asm b/mpn/x86_64/sublsh1_n.asm
index 2f0fe01..c6d829f 100644
--- a/mpn/x86_64/sublsh1_n.asm
+++ b/mpn/x86_64/sublsh1_n.asm
@@ -158,4 +158,3 @@ L(end): add R32(%rbp), R32(%rax)
 FUNC_EXIT()
 ret
 EPILOGUE()
-CF_PROT
diff --git a/mpn/x86_64/x86_64-defs.m4 b/mpn/x86_64/x86_64-defs.m4
index 80f549e..a626419 100644
--- a/mpn/x86_64/x86_64-defs.m4
+++ b/mpn/x86_64/x86_64-defs.m4
@@ -93,38 +93,8 @@ m4_assert_numargs(1)
 ` GLOBL $1
 TYPE($1,`function')
 $1:
- CFPROT_ENDBR
 ')
-dnl Generates the endbr64 instructions
-dnl Using macro, so it can be easily extended to use some arch specific conditional defines
-define(`CFPROT_ENDBR',
-``
- endbr64''
-)
-
-dnl Append the .gnu-property to the end of files
-dnl This is needed for a -fcf-protection
-dnl Again, using macro for easy arch specific defines
-dnl
-define(`CF_PROT',``
- .section .note.gnu.property,"a"
- .align 8
- .long 1f - 0f
- .long 4f - 1f
- .long 5
-0:
- .string "GNU"
-1:
- .align 8
- .long 0xc0000002
- .long 3f - 2f
-2:
- .long 0x3
-3:
- .align 8
-4:
-'')
 dnl Usage: ASSERT([cond][,instructions])
 dnl
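
For reference, the two m4 macros removed from mpn/x86_64/x86_64-defs.m4 in the hunk above are what the rest of this patch undoes file by file. CFPROT_ENDBR expands to a single endbr64 instruction; the earlier hardening patches emitted it at every PROLOGUE entry and at each jump-table target (the L(f0) through L(f7) blocks reached via JMPENT tables), marking those addresses as valid indirect-branch destinations under Intel CET/IBT. CF_PROT appended a .note.gnu.property section so each assembled object is tagged as supporting IBT and shadow stack, which is what every trailing "-CF_PROT" deletion above corresponds to. The sketch below shows roughly what the expanded instrumentation looks like in a generated .s file; it is an illustration only, the symbol name example_func is made up, and the note body simply mirrors the removed CF_PROT definition.

	.text
	.globl	example_func		# illustrative symbol, not part of GMP
	.type	example_func,@function
example_func:
	endbr64				# CFPROT_ENDBR expansion: legal indirect-branch target under IBT
	ret

	# CF_PROT expansion: NT_GNU_PROPERTY_TYPE_0 note advertising CET support
	.section .note.gnu.property,"a"
	.align 8
	.long	1f - 0f			# name size ("GNU" plus NUL = 4)
	.long	4f - 1f			# descriptor size
	.long	5			# note type NT_GNU_PROPERTY_TYPE_0
0:
	.string "GNU"
1:
	.align 8
	.long	0xc0000002		# GNU_PROPERTY_X86_FEATURE_1_AND
	.long	3f - 2f			# property data size
2:
	.long	0x3			# bit 0 = IBT, bit 1 = SHSTK
3:
	.align 8
4:

Because the linker ANDs these per-object feature bits, a single assembler file without the note clears IBT/SHSTK for the whole library; that is why the original hardening patches had to touch every .asm file and why reverting them is equally sweeping.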