[PATCH] PPC64 Replace cmp instructions with cmpw/cmpd
author     David Woodhouse <dwmw2@infradead.org>
           Fri, 8 Oct 2004 00:36:04 +0000 (17:36 -0700)
committer  Linus Torvalds <torvalds@ppc970.osdl.org>
           Fri, 8 Oct 2004 00:36:04 +0000 (17:36 -0700)
On Fri, 2004-10-08 at 15:37 +1000, Paul Mackerras wrote:
> This patch replaces cmp{,l}{,i} with cmp{,l}[wd]{,i} as appropriate.
> The original patch was from Segher Boessenkool, slightly modified by
> me.

And also for ppc32 and arch/ppc64/kernel/ItLpQueue.c...
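
For reference, the cmp{,l}[wd]{,i} forms are the standard PowerPC extended
mnemonics for the basic compare instructions with the length (L) field made
explicit. A rough summary, using cr0/r3/r4 and the immediates below only as
placeholder operands:

	/* word (32-bit) compares, L = 0 -- the forms used for ppc32 */
	cmpwi	cr0,r3,1	/* == cmpi  cr0,0,r3,1       signed, immediate   */
	cmpw	cr0,r3,r4	/* == cmp   cr0,0,r3,r4      signed, register    */
	cmplwi	cr0,r3,0x8000	/* == cmpli cr0,0,r3,0x8000  unsigned, immediate */
	cmplw	cr0,r3,r4	/* == cmpl  cr0,0,r3,r4      unsigned, register  */

	/* doubleword (64-bit) compares, L = 1 -- the forms for ppc64 */
	cmpdi	cr0,r3,1	/* == cmpi  cr0,1,r3,1 */
	cmpd	cr0,r3,r4	/* == cmp   cr0,1,r3,r4 */
	cmpldi	cr0,r3,0x8000	/* == cmpli cr0,1,r3,0x8000 */
	cmpld	cr0,r3,r4	/* == cmpl  cr0,1,r3,r4 */

The comparison width is then encoded in the mnemonic itself rather than left
for the assembler to infer.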

Signed-off-by: David Woodhouse <dwmw2@infradead.org>
Signed-off-by: Linus Torvalds <torvalds@osdl.org>
arch/ppc/boot/common/util.S
arch/ppc/boot/openfirmware/misc.S
arch/ppc/boot/simple/relocate.S
arch/ppc/kernel/cpu_setup_6xx.S
arch/ppc/kernel/entry.S
arch/ppc/kernel/head.S
arch/ppc/kernel/idle_6xx.S
arch/ppc/kernel/idle_power4.S
arch/ppc/kernel/misc.S
arch/ppc/lib/checksum.S
arch/ppc64/kernel/ItLpQueue.c

diff --git a/arch/ppc/boot/common/util.S b/arch/ppc/boot/common/util.S
index 93771e298a4bf44d0596dad6d8b9715632516957..7c4fb8f57953146459030571bb159d37e4d30885 100644
--- a/arch/ppc/boot/common/util.S
+++ b/arch/ppc/boot/common/util.S
@@ -41,7 +41,7 @@ disable_6xx_mmu:
        /* Test for a 601 */
        mfpvr   r10
        srwi    r10,r10,16
-       cmp   0,r10,1         /* 601 ? */
+       cmpwi   0,r10,1         /* 601 ? */
        beq     .clearbats_601
 
        /* Clear BATs */
@@ -117,9 +117,9 @@ _setup_L2CR:
        /* Wait for the invalidation to complete */
        mfspr   r8,PVR
        srwi    r8,r8,16
-       cmpl  cr0,r8,0x8000                   /* 7450 */
-       cmpl  cr1,r8,0x8001                   /* 7455 */
-       cmpl  cr2,r8,0x8002                   /* 7457 */
+       cmplwi  cr0,r8,0x8000                   /* 7450 */
+       cmplwi  cr1,r8,0x8001                   /* 7455 */
+       cmplwi  cr2,r8,0x8002                   /* 7457 */
        cror    4*cr0+eq,4*cr0+eq,4*cr1+eq      /* Now test if any are true. */
        cror    4*cr0+eq,4*cr0+eq,4*cr2+eq
        bne     2f
@@ -190,7 +190,7 @@ timebase_period_ns:
 udelay:
        mfspr   r4,PVR
        srwi    r4,r4,16
-       cmp   0,r4,1          /* 601 ? */
+       cmpwi   0,r4,1          /* 601 ? */
        bne     .udelay_not_601
 00:    li      r0,86   /* Instructions / microsecond? */
        mtctr   r0
@@ -213,16 +213,16 @@ udelay:
 1:     mftbu   r5
        mftb    r6
        mftbu   r7
-       cmp     0,r5,r7
+       cmpw    0,r5,r7
        bne     1b              /* Get [synced] base time */
        addc    r9,r6,r4        /* Compute end time */
        addze   r8,r5
 2:     mftbu   r5
-       cmp     0,r5,r8
+       cmpw    0,r5,r8
        blt     2b
        bgt     3f
        mftb    r6
-       cmp     0,r6,r9
+       cmpw    0,r6,r9
        blt     2b
 3:     blr
 
diff --git a/arch/ppc/boot/openfirmware/misc.S b/arch/ppc/boot/openfirmware/misc.S
index 844c0bca2642e1fe53872f59ee2ae85cdd98af9d..ab9e897cadd040dfa3df462d354d9237ce1ee6b7 100644
--- a/arch/ppc/boot/openfirmware/misc.S
+++ b/arch/ppc/boot/openfirmware/misc.S
@@ -16,7 +16,7 @@
 setup_bats:
        mfpvr   5
        rlwinm  5,5,16,16,31            /* r3 = 1 for 601, 4 for 604 */
-       cmp   0,5,1
+       cmpwi   0,5,1
        li      0,0
        bne     4f
        mtibatl 3,0                     /* invalidate BAT first */
diff --git a/arch/ppc/boot/simple/relocate.S b/arch/ppc/boot/simple/relocate.S
index e57d9673e599699c874de7854ef90550b60bc060..555a216ccc4926b2e22292c38926d41c2b23a085 100644
--- a/arch/ppc/boot/simple/relocate.S
+++ b/arch/ppc/boot/simple/relocate.S
@@ -50,7 +50,7 @@ relocate:
         * Check if we need to relocate ourselves to the link addr or were
         * we loaded there to begin with.
         */
-       cmp     cr0,r3,r4
+       cmpw    cr0,r3,r4
        beq     start_ldr       /* If 0, we don't need to relocate */
 
        /* Move this code somewhere safe.  This is max(load + size, end)
@@ -122,7 +122,7 @@ do_relocate:
        GETSYM(r4, start)
        mr      r3,r8           /* Get the load addr */
 
-       cmp     cr0,r4,r3       /* If we need to copy from the end, do so */
+       cmpw    cr0,r4,r3       /* If we need to copy from the end, do so */
        bgt     do_relocate_from_end
 
 do_relocate_from_start:
@@ -165,7 +165,7 @@ start_ldr:
        subi    r4,r4,4
        li      r0,0
 50:    stwu    r0,4(r3)
-       cmp     cr0,r3,r4
+       cmpw    cr0,r3,r4
        bne     50b
 90:    mr      r9,r1           /* Save old stack pointer (in case it matters) */
        lis     r1,.stack@h
diff --git a/arch/ppc/kernel/cpu_setup_6xx.S b/arch/ppc/kernel/cpu_setup_6xx.S
index 6ba5ef411dd0a51939b997ad12f431f9badc5072..e42049f3ee4c6a90541e6d9098a9779b9cbf9e58 100644
--- a/arch/ppc/kernel/cpu_setup_6xx.S
+++ b/arch/ppc/kernel/cpu_setup_6xx.S
@@ -172,9 +172,9 @@ END_FTR_SECTION_IFCLR(CPU_FTR_NO_DPM)
 setup_750cx:
        mfspr   r10, SPRN_HID1
        rlwinm  r10,r10,4,28,31
-       cmp   cr0,r10,7
-       cmp   cr1,r10,9
-       cmp   cr2,r10,11
+       cmpwi   cr0,r10,7
+       cmpwi   cr1,r10,9
+       cmpwi   cr2,r10,11
        cror    4*cr0+eq,4*cr0+eq,4*cr1+eq
        cror    4*cr0+eq,4*cr0+eq,4*cr2+eq
        bnelr
@@ -287,12 +287,12 @@ _GLOBAL(__save_cpu_setup)
        /* Now deal with CPU type dependent registers */
        mfspr   r3,PVR
        srwi    r3,r3,16
-       cmpl  cr0,r3,0x8000   /* 7450 */
-       cmpl  cr1,r3,0x000c   /* 7400 */
-       cmpl  cr2,r3,0x800c   /* 7410 */
-       cmpl  cr3,r3,0x8001   /* 7455 */
-       cmpl  cr4,r3,0x8002   /* 7457 */
-       cmpl  cr5,r3,0x7000   /* 750FX */
+       cmplwi  cr0,r3,0x8000   /* 7450 */
+       cmplwi  cr1,r3,0x000c   /* 7400 */
+       cmplwi  cr2,r3,0x800c   /* 7410 */
+       cmplwi  cr3,r3,0x8001   /* 7455 */
+       cmplwi  cr4,r3,0x8002   /* 7457 */
+       cmplwi  cr5,r3,0x7000   /* 750FX */
        /* cr1 is 7400 || 7410 */
        cror    4*cr1+eq,4*cr1+eq,4*cr2+eq
        /* cr0 is 74xx */
@@ -323,7 +323,7 @@ _GLOBAL(__save_cpu_setup)
        /* If rev 2.x, backup HID2 */
        mfspr   r3,PVR
        andi.   r3,r3,0xff00
-       cmp   cr0,r3,0x0200
+       cmpwi   cr0,r3,0x0200
        bne     1f
        mfspr   r4,SPRN_HID2
        stw     r4,CS_HID2(r5)
@@ -354,12 +354,12 @@ _GLOBAL(__restore_cpu_setup)
        /* Now deal with CPU type dependent registers */
        mfspr   r3,PVR
        srwi    r3,r3,16
-       cmpl  cr0,r3,0x8000   /* 7450 */
-       cmpl  cr1,r3,0x000c   /* 7400 */
-       cmpl  cr2,r3,0x800c   /* 7410 */
-       cmpl  cr3,r3,0x8001   /* 7455 */
-       cmpl  cr4,r3,0x8002   /* 7457 */
-       cmpl  cr5,r3,0x7000   /* 750FX */
+       cmplwi  cr0,r3,0x8000   /* 7450 */
+       cmplwi  cr1,r3,0x000c   /* 7400 */
+       cmplwi  cr2,r3,0x800c   /* 7410 */
+       cmplwi  cr3,r3,0x8001   /* 7455 */
+       cmplwi  cr4,r3,0x8002   /* 7457 */
+       cmplwi  cr5,r3,0x7000   /* 750FX */
        /* cr1 is 7400 || 7410 */
        cror    4*cr1+eq,4*cr1+eq,4*cr2+eq
        /* cr0 is 74xx */
@@ -412,7 +412,7 @@ _GLOBAL(__restore_cpu_setup)
        /* If rev 2.x, restore HID2 with low voltage bit cleared */
        mfspr   r3,PVR
        andi.   r3,r3,0xff00
-       cmp   cr0,r3,0x0200
+       cmpwi   cr0,r3,0x0200
        bne     4f
        lwz     r4,CS_HID2(r5)
        rlwinm  r4,r4,0,19,17
@@ -426,7 +426,7 @@ _GLOBAL(__restore_cpu_setup)
        mftbl   r5
 3:     mftbl   r6
        sub     r6,r6,r5
-       cmpl  cr0,r6,10000
+       cmplwi  cr0,r6,10000
        ble     3b
        /* Setup final PLL */
        mtspr   SPRN_HID1,r4
diff --git a/arch/ppc/kernel/entry.S b/arch/ppc/kernel/entry.S
index 4963b79c73efd5501629eae9d75ca3782e98f19c..91318c15ccfa4125c4d61fd6ee33276476cae44b 100644
--- a/arch/ppc/kernel/entry.S
+++ b/arch/ppc/kernel/entry.S
@@ -206,7 +206,7 @@ _GLOBAL(DoSyscall)
        andi.   r11,r11,_TIF_SYSCALL_TRACE
        bne-    syscall_dotrace
 syscall_dotrace_cont:
-       cmpl  0,r0,NR_syscalls
+       cmplwi  0,r0,NR_syscalls
        lis     r10,sys_call_table@h
        ori     r10,r10,sys_call_table@l
        slwi    r0,r0,2
@@ -222,7 +222,7 @@ ret_from_syscall:
 #endif
        mr      r6,r3
        li      r11,-_LAST_ERRNO
-       cmpl    0,r3,r11
+       cmplw   0,r3,r11
        rlwinm  r12,r1,0,0,18   /* current_thread_info() */
        blt+    30f
        lwz     r11,TI_LOCAL_FLAGS(r12)
diff --git a/arch/ppc/kernel/head.S b/arch/ppc/kernel/head.S
index 121978c19abe563fcc1972bfea4837e03fbdb916..3a3e3c7424dfbb7a4825d95ef2cfa3174d109ce0 100644
--- a/arch/ppc/kernel/head.S
+++ b/arch/ppc/kernel/head.S
@@ -800,7 +800,7 @@ load_up_fpu:
        tophys(r6,0)                    /* get __pa constant */
        addis   r3,r6,last_task_used_math@ha
        lwz     r4,last_task_used_math@l(r3)
-       cmp   0,r4,0
+       cmpwi   0,r4,0
        beq     1f
        add     r4,r4,r6
        addi    r4,r4,THREAD            /* want last_task_used_math->thread */
@@ -927,7 +927,7 @@ load_up_altivec:
        tophys(r6,0)
        addis   r3,r6,last_task_used_altivec@ha
        lwz     r4,last_task_used_altivec@l(r3)
-       cmp   0,r4,0
+       cmpwi   0,r4,0
        beq     1f
        add     r4,r4,r6
        addi    r4,r4,THREAD    /* want THREAD of last_task_used_altivec */
@@ -992,11 +992,11 @@ giveup_altivec:
        SYNC
        MTMSRD(r5)                      /* enable use of AltiVec now */
        isync
-       cmp   0,r3,0
+       cmpwi   0,r3,0
        beqlr-                          /* if no previous owner, done */
        addi    r3,r3,THREAD            /* want THREAD of task */
        lwz     r5,PT_REGS(r3)
-       cmp   0,r5,0
+       cmpwi   0,r5,0
        SAVE_32VR(0, r4, r3)
        mfvscr  vr0
        li      r4,THREAD_VSCR
@@ -1030,11 +1030,11 @@ giveup_fpu:
        MTMSRD(r5)                      /* enable use of fpu now */
        SYNC_601
        isync
-       cmp   0,r3,0
+       cmpwi   0,r3,0
        beqlr-                          /* if no previous owner, done */
        addi    r3,r3,THREAD            /* want THREAD of task */
        lwz     r5,PT_REGS(r3)
-       cmp   0,r5,0
+       cmpwi   0,r5,0
        SAVE_32FPRS(0, r3)
        mffs    fr0
        stfd    fr0,THREAD_FPSCR-4(r3)
@@ -1539,7 +1539,7 @@ initial_bats:
 #ifndef CONFIG_PPC64BRIDGE
        mfspr   r9,PVR
        rlwinm  r9,r9,16,16,31          /* r9 = 1 for 601, 4 for 604 */
-       cmp   0,r9,1
+       cmpwi   0,r9,1
        bne     4f
        ori     r11,r11,4               /* set up BAT registers for 601 */
        li      r8,0x7f                 /* valid, block length = 8MB */
@@ -1591,7 +1591,7 @@ setup_disp_bat:
        lwz     r8,4(r8)
        mfspr   r9,PVR
        rlwinm  r9,r9,16,16,31          /* r9 = 1 for 601, 4 for 604 */
-       cmp   0,r9,1
+       cmpwi   0,r9,1
        beq     1f
        mtspr   DBAT3L,r8
        mtspr   DBAT3U,r11
diff --git a/arch/ppc/kernel/idle_6xx.S b/arch/ppc/kernel/idle_6xx.S
index 608f71ccbb87223c2ec5496dfe0caa3a87c91410..25d009c75f7bc0e3bfe4aba34679c94727d2d403 100644
--- a/arch/ppc/kernel/idle_6xx.S
+++ b/arch/ppc/kernel/idle_6xx.S
@@ -79,12 +79,12 @@ BEGIN_FTR_SECTION
        /* Now check if user or arch enabled NAP mode */
        lis     r4,powersave_nap@ha
        lwz     r4,powersave_nap@l(r4)
-       cmp   0,r4,0
+       cmpwi   0,r4,0
        beq     1f
        lis     r3,HID0_NAP@h
 1:     
 END_FTR_SECTION_IFSET(CPU_FTR_CAN_NAP)
-       cmp   0,r3,0
+       cmpwi   0,r3,0
        beqlr
 
        /* Clear MSR:EE */
@@ -133,7 +133,7 @@ BEGIN_FTR_SECTION
        /* Go to low speed mode on some 750FX */
        lis     r4,powersave_lowspeed@ha
        lwz     r4,powersave_lowspeed@l(r4)
-       cmp   0,r4,0
+       cmpwi   0,r4,0
        beq     1f
        mfspr   r4,SPRN_HID1
        oris    r4,r4,0x0001
diff --git a/arch/ppc/kernel/idle_power4.S b/arch/ppc/kernel/idle_power4.S
index 538a044f47073df071739d2936483918116b201b..73a58ff03900a00eb66916466683a2a1eb7879f9 100644
--- a/arch/ppc/kernel/idle_power4.S
+++ b/arch/ppc/kernel/idle_power4.S
@@ -56,7 +56,7 @@ END_FTR_SECTION_IFCLR(CPU_FTR_CAN_NAP)
        /* Now check if user or arch enabled NAP mode */
        lis     r4,powersave_nap@ha
        lwz     r4,powersave_nap@l(r4)
-       cmp   0,r4,0
+       cmpwi   0,r4,0
        beqlr
 
        /* Clear MSR:EE */
diff --git a/arch/ppc/kernel/misc.S b/arch/ppc/kernel/misc.S
index 26ff1a34af8d2b13d8e912fb4f6218954b208573..7141d7ecdafdca44c9c4caa05f71f9a4545f215a 100644
--- a/arch/ppc/kernel/misc.S
+++ b/arch/ppc/kernel/misc.S
@@ -214,7 +214,7 @@ _GLOBAL(low_choose_750fx_pll)
        mtmsr   r0
 
        /* If switching to PLL1, disable HID0:BTIC */
-       cmpl  cr0,r3,0
+       cmplwi  cr0,r3,0
        beq     1f
        mfspr   r5,HID0
        rlwinm  r5,r5,0,27,25
@@ -239,7 +239,7 @@ _GLOBAL(low_choose_750fx_pll)
        stw     r4,nap_save_hid1@l(r6)
 
        /* If switching to PLL0, enable HID0:BTIC */
-       cmpl  cr0,r3,0
+       cmplwi  cr0,r3,0
        bne     1f
        mfspr   r5,HID0
        ori     r5,r5,HID0_BTIC
@@ -470,7 +470,7 @@ _GLOBAL(_tlbia)
        ori     r9,r9,mmu_hash_lock@l
        tophys(r9,r9)
 10:    lwarx   r7,0,r9
-       cmp   0,r7,0
+       cmpwi   0,r7,0
        bne-    10b
        stwcx.  r8,0,r9
        bne-    10b
@@ -551,7 +551,7 @@ _GLOBAL(_tlbie)
        ori     r9,r9,mmu_hash_lock@l
        tophys(r9,r9)
 10:    lwarx   r7,0,r9
-       cmp   0,r7,0
+       cmpwi   0,r7,0
        bne-    10b
        stwcx.  r8,0,r9
        bne-    10b
@@ -599,7 +599,7 @@ _GLOBAL(flush_instruction_cache)
 #else
        mfspr   r3,PVR
        rlwinm  r3,r3,16,16,31
-       cmp   0,r3,1
+       cmpwi   0,r3,1
        beqlr                   /* for 601, do nothing */
        /* 603/604 processor - use invalidate-all bit in HID0 */
        mfspr   r3,HID0
@@ -1141,7 +1141,7 @@ _GLOBAL(kernel_thread)
        li      r4,0            /* new sp (unused) */
        li      r0,__NR_clone
        sc
-       cmp   0,r3,0          /* parent or child? */
+       cmpwi   0,r3,0          /* parent or child? */
        bne     1f              /* return if parent */
        li      r0,0            /* make top-level stack frame */
        stwu    r0,-16(r1)
diff --git a/arch/ppc/lib/checksum.S b/arch/ppc/lib/checksum.S
index 74928a7a27d140450ed8c6f99baacde6e73a7a46..7874e8a80455c9620ab960ba1e15e5c8a6aece7b 100644
--- a/arch/ppc/lib/checksum.S
+++ b/arch/ppc/lib/checksum.S
@@ -80,13 +80,13 @@ _GLOBAL(csum_partial)
        adde    r0,r0,r5        /* be unnecessary to unroll this loop */
        bdnz    2b
        andi.   r4,r4,3
-3:     cmp   0,r4,2
+3:     cmpwi   0,r4,2
        blt+    4f
        lhz     r5,4(r3)
        addi    r3,r3,2
        subi    r4,r4,2
        adde    r0,r0,r5
-4:     cmp   0,r4,1
+4:     cmpwi   0,r4,1
        bne+    5f
        lbz     r5,4(r3)
        slwi    r5,r5,8         /* Upper byte of word */
@@ -143,7 +143,7 @@ _GLOBAL(csum_partial_copy_generic)
        adde    r0,r0,r9
        bdnz    82b
 13:    andi.   r5,r5,3
-3:     cmp   0,r5,2
+3:     cmpwi   0,r5,2
        blt+    4f
 83:    lhz     r6,4(r3)
        addi    r3,r3,2
@@ -151,7 +151,7 @@ _GLOBAL(csum_partial_copy_generic)
 93:    sth     r6,4(r4)
        addi    r4,r4,2
        adde    r0,r0,r6
-4:     cmp   0,r5,1
+4:     cmpwi   0,r5,1
        bne+    5f
 84:    lbz     r6,4(r3)
 94:    stb     r6,4(r4)
@@ -188,7 +188,7 @@ src_error_3:
 97:    stbu    r6,1(r4)
        bdnz    97b
 src_error:
-       cmp   0,r7,0
+       cmpwi   0,r7,0
        beq     1f
        li      r6,-EFAULT
        stw     r6,0(r7)
@@ -196,7 +196,7 @@ src_error:
        blr
 
 dst_error:
-       cmp   0,r8,0
+       cmpwi   0,r8,0
        beq     1f
        li      r6,-EFAULT
        stw     r6,0(r8)
diff --git a/arch/ppc64/kernel/ItLpQueue.c b/arch/ppc64/kernel/ItLpQueue.c
index 6f21bde213b92b3594878886228ab1a20a0dad13..c923a815760ec50f2b478f7f2857e175a0b0575a 100644
--- a/arch/ppc64/kernel/ItLpQueue.c
+++ b/arch/ppc64/kernel/ItLpQueue.c
@@ -25,7 +25,7 @@ static __inline__ int set_inUse( struct ItLpQueue * lpQueue )
 
        __asm__ __volatile__("\n\
 1:     lwarx   %0,0,%2         \n\
-       cmp   0,%0,0          \n\
+       cmpwi   0,%0,0          \n\
        li      %0,0            \n\
        bne-    2f              \n\
        addi    %0,%0,1         \n\