From 08f87f769b595151be1afeff53e144f543faa614 Mon Sep 17 00:00:00 2001
From: hc <hc@nodka.com>
Date: Wed, 06 Dec 2023 09:51:13 +0000
Subject: [PATCH] parisc: switch pacache.S TLB and cache flushes to ALTERNATIVE() patching

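Bring kernel/arch/parisc/kernel/pacache.S in line with the upstream
self-patching rework of the TLB and cache flush routines:

* Add an SPDX-License-Identifier line and drop the GPL boilerplate
  text; include <linux/pgtable.h> and <asm/alternative.h> instead of
  <asm/pgtable.h>.

* Remove the tlb_lock/tlb_unlock spinlock macros.  In the non-PA20
  paths the pdtlb/pitlb instructions are now tagged with
  ALTERNATIVE(0b, 0b+4, ALT_COND_NO_SMP, INSN_PxTLB) entries so the
  boot-time alternatives code can rewrite the purge instructions on
  machines that do not need the SMP-safe form.

* Bracket each flush routine with 88:/89: labels and an
  ALTERNATIVE(88b, 89b, condition, INSN_NOP) entry, so the whole body
  can be turned into NOPs on machines without the corresponding data
  cache, instruction cache or split TLB
  (ALT_COND_NO_DCACHE/NO_ICACHE/NO_SPLIT_TLB).  When running under
  QEMU, flush_tlb_all_local is reduced to a single pdtlbe via
  ALT_COND_RUN_ON_QEMU.

* Add purge_dcache_page_asm, unroll the user/kernel range-flush loops
  to 16 fdc/fic/pdc operations per iteration, and switch the loop
  branches to the cmpb,COND(>>) / cmpb,COND(<<=) "predict taken"
  forms.

The recurring pattern is sketched below (condensed from
flush_data_cache_local in this patch; the elided lines are the
unchanged flush loop):

	88:	load32	cache_info, %r1
		...				/* flush the data cache */
		mtsm	%r22			/* restore I-bit */
	89:	ALTERNATIVE(88b, 89b, ALT_COND_NO_DCACHE, INSN_NOP)
		bv	%r0(%r2)
		nop

The ALTERNATIVE() entries only record the 88b..89b range together
with the condition and the replacement instruction; the actual
patching is applied at boot time by the parisc alternatives code.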
---
 kernel/arch/parisc/kernel/pacache.S |  340 ++++++++++++++++++++++++++++++++++++++++++--------------
 1 file changed, 253 insertions(+), 87 deletions(-)

diff --git a/kernel/arch/parisc/kernel/pacache.S b/kernel/arch/parisc/kernel/pacache.S
index f33bf2d..b2ba6d6 100644
--- a/kernel/arch/parisc/kernel/pacache.S
+++ b/kernel/arch/parisc/kernel/pacache.S
@@ -1,22 +1,9 @@
+/* SPDX-License-Identifier: GPL-2.0-or-later */
 /*
  *  PARISC TLB and cache flushing support
  *  Copyright (C) 2000-2001 Hewlett-Packard (John Marvin)
  *  Copyright (C) 2001 Matthew Wilcox (willy at parisc-linux.org)
  *  Copyright (C) 2002 Richard Hirst (rhirst with parisc-linux.org)
- *
- *    This program is free software; you can redistribute it and/or modify
- *    it under the terms of the GNU General Public License as published by
- *    the Free Software Foundation; either version 2, or (at your option)
- *    any later version.
- *
- *    This program is distributed in the hope that it will be useful,
- *    but WITHOUT ANY WARRANTY; without even the implied warranty of
- *    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
- *    GNU General Public License for more details.
- *
- *    You should have received a copy of the GNU General Public License
- *    along with this program; if not, write to the Free Software
- *    Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
  */
 
 /*
@@ -34,11 +21,12 @@
 
 #include <asm/psw.h>
 #include <asm/assembly.h>
-#include <asm/pgtable.h>
 #include <asm/cache.h>
 #include <asm/ldcw.h>
+#include <asm/alternative.h>
 #include <linux/linkage.h>
 #include <linux/init.h>
+#include <linux/pgtable.h>
 
 	.section .text.hot
 	.align	16
@@ -75,7 +63,7 @@
 
 	/* Flush Instruction Tlb */
 
-	LDREG		ITLB_SID_BASE(%r1), %r20
+88:	LDREG		ITLB_SID_BASE(%r1), %r20
 	LDREG		ITLB_SID_STRIDE(%r1), %r21
 	LDREG		ITLB_SID_COUNT(%r1), %r22
 	LDREG		ITLB_OFF_BASE(%r1), %arg0
@@ -115,6 +103,7 @@
 	add		%r21, %r20, %r20		/* increment space */
 
 fitdone:
+	ALTERNATIVE(88b, fitdone, ALT_COND_NO_SPLIT_TLB, INSN_NOP)
 
 	/* Flush Data Tlb */
 
@@ -185,12 +174,21 @@
 
 2:      bv		%r0(%r2)
 	nop
+
+	/*
+	 * When running under QEMU, drop the whole flush_tlb_all_local
+	 * function and replace it with a single pdtlbe instruction, for
+	 * which QEMU will drop all local TLB entries.
+	 */
+3:	pdtlbe		%r0(%sr1,%r0)
+	bv,n		%r0(%r2)
+	ALTERNATIVE_CODE(flush_tlb_all_local, 2, ALT_COND_RUN_ON_QEMU, 3b)
 ENDPROC_CFI(flush_tlb_all_local)
 
 	.import cache_info,data
 
 ENTRY_CFI(flush_instruction_cache_local)
-	load32		cache_info, %r1
+88:	load32		cache_info, %r1
 
 	/* Flush Instruction Cache */
 
@@ -243,6 +241,7 @@
 fisync:
 	sync
 	mtsm		%r22			/* restore I-bit */
+89:	ALTERNATIVE(88b, 89b, ALT_COND_NO_ICACHE, INSN_NOP)
 	bv		%r0(%r2)
 	nop
 ENDPROC_CFI(flush_instruction_cache_local)
@@ -250,7 +249,7 @@
 
 	.import cache_info, data
 ENTRY_CFI(flush_data_cache_local)
-	load32		cache_info, %r1
+88:	load32		cache_info, %r1
 
 	/* Flush Data Cache */
 
@@ -304,39 +303,10 @@
 	syncdma
 	sync
 	mtsm		%r22			/* restore I-bit */
+89:	ALTERNATIVE(88b, 89b, ALT_COND_NO_DCACHE, INSN_NOP)
 	bv		%r0(%r2)
 	nop
 ENDPROC_CFI(flush_data_cache_local)
-
-/* Macros to serialize TLB purge operations on SMP.  */
-
-	.macro	tlb_lock	la,flags,tmp
-#ifdef CONFIG_SMP
-#if __PA_LDCW_ALIGNMENT > 4
-	load32		pa_tlb_lock + __PA_LDCW_ALIGNMENT-1, \la
-	depi		0,31,__PA_LDCW_ALIGN_ORDER, \la
-#else
-	load32		pa_tlb_lock, \la
-#endif
-	rsm		PSW_SM_I,\flags
-1:	LDCW		0(\la),\tmp
-	cmpib,<>,n	0,\tmp,3f
-2:	ldw		0(\la),\tmp
-	cmpb,<>		%r0,\tmp,1b
-	nop
-	b,n		2b
-3:
-#endif
-	.endm
-
-	.macro	tlb_unlock	la,flags,tmp
-#ifdef CONFIG_SMP
-	ldi		1,\tmp
-	sync
-	stw		\tmp,0(\la)
-	mtsm		\flags
-#endif
-	.endm
 
 /* Clear page using kernel mapping.  */
 
@@ -595,10 +565,10 @@
 	pdtlb,l		%r0(%r28)
 	pdtlb,l		%r0(%r29)
 #else
-	tlb_lock	%r20,%r21,%r22
-	pdtlb		%r0(%r28)
-	pdtlb		%r0(%r29)
-	tlb_unlock	%r20,%r21,%r22
+0:	pdtlb		%r0(%r28)
+1:	pdtlb		%r0(%r29)
+	ALTERNATIVE(0b, 0b+4, ALT_COND_NO_SMP, INSN_PxTLB)
+	ALTERNATIVE(1b, 1b+4, ALT_COND_NO_SMP, INSN_PxTLB)
 #endif
 
 #ifdef CONFIG_64BIT
@@ -735,9 +705,8 @@
 #ifdef CONFIG_PA20
 	pdtlb,l		%r0(%r28)
 #else
-	tlb_lock	%r20,%r21,%r22
-	pdtlb		%r0(%r28)
-	tlb_unlock	%r20,%r21,%r22
+0:	pdtlb		%r0(%r28)
+	ALTERNATIVE(0b, 0b+4, ALT_COND_NO_SMP, INSN_PxTLB)
 #endif
 
 #ifdef CONFIG_64BIT
@@ -812,12 +781,11 @@
 #ifdef CONFIG_PA20
 	pdtlb,l		%r0(%r28)
 #else
-	tlb_lock	%r20,%r21,%r22
-	pdtlb		%r0(%r28)
-	tlb_unlock	%r20,%r21,%r22
+0:	pdtlb		%r0(%r28)
+	ALTERNATIVE(0b, 0b+4, ALT_COND_NO_SMP, INSN_PxTLB)
 #endif
 
-	ldil		L%dcache_stride, %r1
+88:	ldil		L%dcache_stride, %r1
 	ldw		R%dcache_stride(%r1), r31
 
 #ifdef CONFIG_64BIT
@@ -828,8 +796,7 @@
 	add		%r28, %r25, %r25
 	sub		%r25, r31, %r25
 
-
-1:      fdc,m		r31(%r28)
+1:	fdc,m		r31(%r28)
 	fdc,m		r31(%r28)
 	fdc,m		r31(%r28)
 	fdc,m		r31(%r28)
@@ -844,13 +811,73 @@
 	fdc,m		r31(%r28)
 	fdc,m		r31(%r28)
 	fdc,m		r31(%r28)
-	cmpb,COND(<<)	%r28, %r25,1b
+	cmpb,COND(>>)	%r25, %r28, 1b /* predict taken */
 	fdc,m		r31(%r28)
 
+89:	ALTERNATIVE(88b, 89b, ALT_COND_NO_DCACHE, INSN_NOP)
 	sync
 	bv		%r0(%r2)
 	nop
 ENDPROC_CFI(flush_dcache_page_asm)
+
+ENTRY_CFI(purge_dcache_page_asm)
+	ldil		L%(TMPALIAS_MAP_START), %r28
+#ifdef CONFIG_64BIT
+#if (TMPALIAS_MAP_START >= 0x80000000)
+	depdi		0, 31,32, %r28		/* clear any sign extension */
+#endif
+	convert_phys_for_tlb_insert20 %r26	/* convert phys addr to tlb insert format */
+	depd		%r25, 63,22, %r28	/* Form aliased virtual address 'to' */
+	depdi		0, 63,PAGE_SHIFT, %r28	/* Clear any offset bits */
+#else
+	extrw,u		%r26, 24,25, %r26	/* convert phys addr to tlb insert format */
+	depw		%r25, 31,22, %r28	/* Form aliased virtual address 'to' */
+	depwi		0, 31,PAGE_SHIFT, %r28	/* Clear any offset bits */
+#endif
+
+	/* Purge any old translation */
+
+#ifdef CONFIG_PA20
+	pdtlb,l		%r0(%r28)
+#else
+0:	pdtlb		%r0(%r28)
+	ALTERNATIVE(0b, 0b+4, ALT_COND_NO_SMP, INSN_PxTLB)
+#endif
+
+88:	ldil		L%dcache_stride, %r1
+	ldw		R%dcache_stride(%r1), r31
+
+#ifdef CONFIG_64BIT
+	depdi,z		1, 63-PAGE_SHIFT,1, %r25
+#else
+	depwi,z		1, 31-PAGE_SHIFT,1, %r25
+#endif
+	add		%r28, %r25, %r25
+	sub		%r25, r31, %r25
+
+1:      pdc,m		r31(%r28)
+	pdc,m		r31(%r28)
+	pdc,m		r31(%r28)
+	pdc,m		r31(%r28)
+	pdc,m		r31(%r28)
+	pdc,m		r31(%r28)
+	pdc,m		r31(%r28)
+	pdc,m		r31(%r28)
+	pdc,m		r31(%r28)
+	pdc,m		r31(%r28)
+	pdc,m		r31(%r28)
+	pdc,m		r31(%r28)
+	pdc,m		r31(%r28)
+	pdc,m		r31(%r28)
+	pdc,m		r31(%r28)
+	cmpb,COND(>>)	%r25, %r28, 1b /* predict taken */
+	pdc,m		r31(%r28)
+
+89:	ALTERNATIVE(88b, 89b, ALT_COND_NO_DCACHE, INSN_NOP)
+	sync
+	bv		%r0(%r2)
+	nop
+ENDPROC_CFI(purge_dcache_page_asm)
 
 ENTRY_CFI(flush_icache_page_asm)
 	ldil		L%(TMPALIAS_MAP_START), %r28
@@ -874,15 +901,17 @@
 
 #ifdef CONFIG_PA20
 	pdtlb,l		%r0(%r28)
-	pitlb,l         %r0(%sr4,%r28)
+1:	pitlb,l         %r0(%sr4,%r28)
+	ALTERNATIVE(1b, 1b+4, ALT_COND_NO_SPLIT_TLB, INSN_NOP)
 #else
-	tlb_lock        %r20,%r21,%r22
-	pdtlb		%r0(%r28)
-	pitlb           %r0(%sr4,%r28)
-	tlb_unlock      %r20,%r21,%r22
+0:	pdtlb		%r0(%r28)
+1:	pitlb           %r0(%sr4,%r28)
+	ALTERNATIVE(0b, 0b+4, ALT_COND_NO_SMP, INSN_PxTLB)
+	ALTERNATIVE(1b, 1b+4, ALT_COND_NO_SMP, INSN_PxTLB)
+	ALTERNATIVE(1b, 1b+4, ALT_COND_NO_SPLIT_TLB, INSN_NOP)
 #endif
 
-	ldil		L%icache_stride, %r1
+88:	ldil		L%icache_stride, %r1
 	ldw		R%icache_stride(%r1), %r31
 
 #ifdef CONFIG_64BIT
@@ -892,7 +921,6 @@
 #endif
 	add		%r28, %r25, %r25
 	sub		%r25, %r31, %r25
-
 
 	/* fic only has the type 26 form on PA1.1, requiring an
 	 * explicit space specification, so use %sr4 */
@@ -911,16 +939,17 @@
 	fic,m		%r31(%sr4,%r28)
 	fic,m		%r31(%sr4,%r28)
 	fic,m		%r31(%sr4,%r28)
-	cmpb,COND(<<)	%r28, %r25,1b
+	cmpb,COND(>>)	%r25, %r28, 1b /* predict taken */
 	fic,m		%r31(%sr4,%r28)
 
+89:	ALTERNATIVE(88b, 89b, ALT_COND_NO_ICACHE, INSN_NOP)
 	sync
 	bv		%r0(%r2)
 	nop
 ENDPROC_CFI(flush_icache_page_asm)
 
 ENTRY_CFI(flush_kernel_dcache_page_asm)
-	ldil		L%dcache_stride, %r1
+88:	ldil		L%dcache_stride, %r1
 	ldw		R%dcache_stride(%r1), %r23
 
 #ifdef CONFIG_64BIT
@@ -930,7 +959,6 @@
 #endif
 	add		%r26, %r25, %r25
 	sub		%r25, %r23, %r25
-
 
 1:      fdc,m		%r23(%r26)
 	fdc,m		%r23(%r26)
@@ -947,16 +975,17 @@
 	fdc,m		%r23(%r26)
 	fdc,m		%r23(%r26)
 	fdc,m		%r23(%r26)
-	cmpb,COND(<<)		%r26, %r25,1b
+	cmpb,COND(>>)	%r25, %r26, 1b /* predict taken */
 	fdc,m		%r23(%r26)
 
+89:	ALTERNATIVE(88b, 89b, ALT_COND_NO_DCACHE, INSN_NOP)
 	sync
 	bv		%r0(%r2)
 	nop
 ENDPROC_CFI(flush_kernel_dcache_page_asm)
 
 ENTRY_CFI(purge_kernel_dcache_page_asm)
-	ldil		L%dcache_stride, %r1
+88:	ldil		L%dcache_stride, %r1
 	ldw		R%dcache_stride(%r1), %r23
 
 #ifdef CONFIG_64BIT
@@ -982,74 +1011,183 @@
 	pdc,m		%r23(%r26)
 	pdc,m		%r23(%r26)
 	pdc,m		%r23(%r26)
-	cmpb,COND(<<)		%r26, %r25, 1b
+	cmpb,COND(>>)	%r25, %r26, 1b /* predict taken */
 	pdc,m		%r23(%r26)
 
+89:	ALTERNATIVE(88b, 89b, ALT_COND_NO_DCACHE, INSN_NOP)
 	sync
 	bv		%r0(%r2)
 	nop
 ENDPROC_CFI(purge_kernel_dcache_page_asm)
 
 ENTRY_CFI(flush_user_dcache_range_asm)
-	ldil		L%dcache_stride, %r1
+88:	ldil		L%dcache_stride, %r1
 	ldw		R%dcache_stride(%r1), %r23
 	ldo		-1(%r23), %r21
 	ANDCM		%r26, %r21, %r26
 
-1:      cmpb,COND(<<),n	%r26, %r25, 1b
+#ifdef CONFIG_64BIT
+	depd,z		%r23, 59, 60, %r21
+#else
+	depw,z		%r23, 27, 28, %r21
+#endif
+	add		%r26, %r21, %r22
+	cmpb,COND(>>),n	%r22, %r25, 2f /* predict not taken */
+1:	add		%r22, %r21, %r22
+	fdc,m		%r23(%sr3, %r26)
+	fdc,m		%r23(%sr3, %r26)
+	fdc,m		%r23(%sr3, %r26)
+	fdc,m		%r23(%sr3, %r26)
+	fdc,m		%r23(%sr3, %r26)
+	fdc,m		%r23(%sr3, %r26)
+	fdc,m		%r23(%sr3, %r26)
+	fdc,m		%r23(%sr3, %r26)
+	fdc,m		%r23(%sr3, %r26)
+	fdc,m		%r23(%sr3, %r26)
+	fdc,m		%r23(%sr3, %r26)
+	fdc,m		%r23(%sr3, %r26)
+	fdc,m		%r23(%sr3, %r26)
+	fdc,m		%r23(%sr3, %r26)
+	fdc,m		%r23(%sr3, %r26)
+	cmpb,COND(<<=)	%r22, %r25, 1b /* predict taken */
 	fdc,m		%r23(%sr3, %r26)
 
+2:	cmpb,COND(>>),n	%r25, %r26, 2b
+	fdc,m		%r23(%sr3, %r26)
+
+89:	ALTERNATIVE(88b, 89b, ALT_COND_NO_DCACHE, INSN_NOP)
 	sync
 	bv		%r0(%r2)
 	nop
 ENDPROC_CFI(flush_user_dcache_range_asm)
 
 ENTRY_CFI(flush_kernel_dcache_range_asm)
-	ldil		L%dcache_stride, %r1
+88:	ldil		L%dcache_stride, %r1
 	ldw		R%dcache_stride(%r1), %r23
 	ldo		-1(%r23), %r21
 	ANDCM		%r26, %r21, %r26
 
-1:      cmpb,COND(<<),n	%r26, %r25,1b
+#ifdef CONFIG_64BIT
+	depd,z		%r23, 59, 60, %r21
+#else
+	depw,z		%r23, 27, 28, %r21
+#endif
+	add		%r26, %r21, %r22
+	cmpb,COND(>>),n	%r22, %r25, 2f /* predict not taken */
+1:	add		%r22, %r21, %r22
+	fdc,m		%r23(%r26)
+	fdc,m		%r23(%r26)
+	fdc,m		%r23(%r26)
+	fdc,m		%r23(%r26)
+	fdc,m		%r23(%r26)
+	fdc,m		%r23(%r26)
+	fdc,m		%r23(%r26)
+	fdc,m		%r23(%r26)
+	fdc,m		%r23(%r26)
+	fdc,m		%r23(%r26)
+	fdc,m		%r23(%r26)
+	fdc,m		%r23(%r26)
+	fdc,m		%r23(%r26)
+	fdc,m		%r23(%r26)
+	fdc,m		%r23(%r26)
+	cmpb,COND(<<=)	%r22, %r25, 1b /* predict taken */
+	fdc,m		%r23(%r26)
+
+2:	cmpb,COND(>>),n	%r25, %r26, 2b /* predict taken */
 	fdc,m		%r23(%r26)
 
 	sync
+89:	ALTERNATIVE(88b, 89b, ALT_COND_NO_DCACHE, INSN_NOP)
 	syncdma
 	bv		%r0(%r2)
 	nop
 ENDPROC_CFI(flush_kernel_dcache_range_asm)
 
 ENTRY_CFI(purge_kernel_dcache_range_asm)
-	ldil		L%dcache_stride, %r1
+88:	ldil		L%dcache_stride, %r1
 	ldw		R%dcache_stride(%r1), %r23
 	ldo		-1(%r23), %r21
 	ANDCM		%r26, %r21, %r26
 
-1:      cmpb,COND(<<),n	%r26, %r25,1b
+#ifdef CONFIG_64BIT
+	depd,z		%r23, 59, 60, %r21
+#else
+	depw,z		%r23, 27, 28, %r21
+#endif
+	add		%r26, %r21, %r22
+	cmpb,COND(>>),n	%r22, %r25, 2f /* predict not taken */
+1:	add		%r22, %r21, %r22
+	pdc,m		%r23(%r26)
+	pdc,m		%r23(%r26)
+	pdc,m		%r23(%r26)
+	pdc,m		%r23(%r26)
+	pdc,m		%r23(%r26)
+	pdc,m		%r23(%r26)
+	pdc,m		%r23(%r26)
+	pdc,m		%r23(%r26)
+	pdc,m		%r23(%r26)
+	pdc,m		%r23(%r26)
+	pdc,m		%r23(%r26)
+	pdc,m		%r23(%r26)
+	pdc,m		%r23(%r26)
+	pdc,m		%r23(%r26)
+	pdc,m		%r23(%r26)
+	cmpb,COND(<<=)	%r22, %r25, 1b /* predict taken */
+	pdc,m		%r23(%r26)
+
+2:	cmpb,COND(>>),n	%r25, %r26, 2b /* predict taken */
 	pdc,m		%r23(%r26)
 
 	sync
+89:	ALTERNATIVE(88b, 89b, ALT_COND_NO_DCACHE, INSN_NOP)
 	syncdma
 	bv		%r0(%r2)
 	nop
 ENDPROC_CFI(purge_kernel_dcache_range_asm)
 
 ENTRY_CFI(flush_user_icache_range_asm)
-	ldil		L%icache_stride, %r1
+88:	ldil		L%icache_stride, %r1
 	ldw		R%icache_stride(%r1), %r23
 	ldo		-1(%r23), %r21
 	ANDCM		%r26, %r21, %r26
 
-1:      cmpb,COND(<<),n	%r26, %r25,1b
+#ifdef CONFIG_64BIT
+	depd,z		%r23, 59, 60, %r21
+#else
+	depw,z		%r23, 27, 28, %r21
+#endif
+	add		%r26, %r21, %r22
+	cmpb,COND(>>),n	%r22, %r25, 2f /* predict not taken */
+1:	add		%r22, %r21, %r22
+	fic,m		%r23(%sr3, %r26)
+	fic,m		%r23(%sr3, %r26)
+	fic,m		%r23(%sr3, %r26)
+	fic,m		%r23(%sr3, %r26)
+	fic,m		%r23(%sr3, %r26)
+	fic,m		%r23(%sr3, %r26)
+	fic,m		%r23(%sr3, %r26)
+	fic,m		%r23(%sr3, %r26)
+	fic,m		%r23(%sr3, %r26)
+	fic,m		%r23(%sr3, %r26)
+	fic,m		%r23(%sr3, %r26)
+	fic,m		%r23(%sr3, %r26)
+	fic,m		%r23(%sr3, %r26)
+	fic,m		%r23(%sr3, %r26)
+	fic,m		%r23(%sr3, %r26)
+	cmpb,COND(<<=)	%r22, %r25, 1b /* predict taken */
 	fic,m		%r23(%sr3, %r26)
 
+2:	cmpb,COND(>>),n	%r25, %r26, 2b
+	fic,m		%r23(%sr3, %r26)
+
+89:	ALTERNATIVE(88b, 89b, ALT_COND_NO_ICACHE, INSN_NOP)
 	sync
 	bv		%r0(%r2)
 	nop
 ENDPROC_CFI(flush_user_icache_range_asm)
 
 ENTRY_CFI(flush_kernel_icache_page)
-	ldil		L%icache_stride, %r1
+88:	ldil		L%icache_stride, %r1
 	ldw		R%icache_stride(%r1), %r23
 
 #ifdef CONFIG_64BIT
@@ -1076,23 +1214,51 @@
 	fic,m		%r23(%sr4, %r26)
 	fic,m		%r23(%sr4, %r26)
 	fic,m		%r23(%sr4, %r26)
-	cmpb,COND(<<)		%r26, %r25, 1b
+	cmpb,COND(>>)	%r25, %r26, 1b /* predict taken */
 	fic,m		%r23(%sr4, %r26)
 
+89:	ALTERNATIVE(88b, 89b, ALT_COND_NO_ICACHE, INSN_NOP)
 	sync
 	bv		%r0(%r2)
 	nop
 ENDPROC_CFI(flush_kernel_icache_page)
 
 ENTRY_CFI(flush_kernel_icache_range_asm)
-	ldil		L%icache_stride, %r1
+88:	ldil		L%icache_stride, %r1
 	ldw		R%icache_stride(%r1), %r23
 	ldo		-1(%r23), %r21
 	ANDCM		%r26, %r21, %r26
 
-1:      cmpb,COND(<<),n	%r26, %r25, 1b
+#ifdef CONFIG_64BIT
+	depd,z		%r23, 59, 60, %r21
+#else
+	depw,z		%r23, 27, 28, %r21
+#endif
+	add		%r26, %r21, %r22
+	cmpb,COND(>>),n	%r22, %r25, 2f /* predict not taken */
+1:	add		%r22, %r21, %r22
+	fic,m		%r23(%sr4, %r26)
+	fic,m		%r23(%sr4, %r26)
+	fic,m		%r23(%sr4, %r26)
+	fic,m		%r23(%sr4, %r26)
+	fic,m		%r23(%sr4, %r26)
+	fic,m		%r23(%sr4, %r26)
+	fic,m		%r23(%sr4, %r26)
+	fic,m		%r23(%sr4, %r26)
+	fic,m		%r23(%sr4, %r26)
+	fic,m		%r23(%sr4, %r26)
+	fic,m		%r23(%sr4, %r26)
+	fic,m		%r23(%sr4, %r26)
+	fic,m		%r23(%sr4, %r26)
+	fic,m		%r23(%sr4, %r26)
+	fic,m		%r23(%sr4, %r26)
+	cmpb,COND(<<=)	%r22, %r25, 1b /* predict taken */
 	fic,m		%r23(%sr4, %r26)
 
+2:	cmpb,COND(>>),n	%r25, %r26, 2b /* predict taken */
+	fic,m		%r23(%sr4, %r26)
+
+89:	ALTERNATIVE(88b, 89b, ALT_COND_NO_ICACHE, INSN_NOP)
 	sync
 	bv		%r0(%r2)
 	nop

--
Gitblit v1.6.2