/* Test for a 601 */
mfpvr r10
srwi r10,r10,16
- cmpi 0,r10,1 /* 601 ? */
+ cmpwi 0,r10,1 /* 601 ? */
beq .clearbats_601
/* Clear BATs */
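
One note that covers every hunk below: cmpi, cmpli, cmp and cmpl are the basic compare mnemonics, whose full form takes an explicit L (operand-width) field, and newer binutils no longer accept the three-operand shorthand. The extended word-compare mnemonics encode L=0 themselves, so each replacement is behavior-preserving:

/* cmpwi cr0,r10,1 assembles as cmpi cr0,0,r10,1 (L=0: 32-bit compare);
 * cmplwi, cmpw and cmplw expand the same way for their base forms. */
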
/* Wait for the invalidation to complete */
mfspr r8,PVR
srwi r8,r8,16
- cmpli cr0,r8,0x8000 /* 7450 */
- cmpli cr1,r8,0x8001 /* 7455 */
- cmpli cr2,r8,0x8002 /* 7457 */
+ cmplwi cr0,r8,0x8000 /* 7450 */
+ cmplwi cr1,r8,0x8001 /* 7455 */
+ cmplwi cr2,r8,0x8002 /* 7457 */
cror 4*cr0+eq,4*cr0+eq,4*cr1+eq /* Now test if any are true. */
cror 4*cr0+eq,4*cr0+eq,4*cr2+eq
bne 2f
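
The cror pair collapses three compare results into one testable bit. CR bit operands are plain bit indices: each CR field is 4 bits wide (lt=0, gt=1, eq=2, so=3), so 4*crN+eq names the eq bit of field N:

/* After the two crors: cr0.eq = cr0.eq | cr1.eq | cr2.eq, so the
 * bne above falls through only on a 7450/7455/7457. */
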
udelay:
mfspr r4,PVR
srwi r4,r4,16
- cmpi 0,r4,1 /* 601 ? */
+ cmpwi 0,r4,1 /* 601 ? */
bne .udelay_not_601
00: li r0,86 /* Instructions / microsecond? */
mtctr r0
1: mftbu r5
mftb r6
mftbu r7
- cmp 0,r5,r7
+ cmpw 0,r5,r7
bne 1b /* Get [synced] base time */
addc r9,r6,r4 /* Compute end time */
addze r8,r5
2: mftbu r5
- cmp 0,r5,r8
+ cmpw 0,r5,r8
blt 2b
bgt 3f
mftb r6
- cmp 0,r6,r9
+ cmpw 0,r6,r9
blt 2b
3: blr
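
The mftbu/mftb/mftbu dance is the standard way to read the 64-bit timebase through two 32-bit registers:

/* If TBU changed between the two reads, TBL carried into TBU in the
 * middle and the {r5,r6} pair is torn - retry.  addc/addze then do a
 * 64-bit add: the carry out of TBL+delay propagates into the upper
 * word, giving the end time compared against in the wait loop. */
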
setup_bats:
mfpvr 5
rlwinm 5,5,16,16,31 /* r5 = 1 for 601, 4 for 604 */
- cmpi 0,5,1
+ cmpwi 0,5,1
li 0,0
bne 4f
mtibatl 3,0 /* invalidate BAT first */
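
Worth a note, since the 601 path looks backwards: on the 601 the BAT valid bit lives in the lower BAT register (later 6xx parts keep Vs/Vp in the upper one), so writing 0 to IBAT3L is what actually invalidates the mapping here.
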
/*
* Check if we need to relocate ourselves to the link addr or were
* we loaded there to begin with.
*/
- cmp cr0,r3,r4
+ cmpw cr0,r3,r4
beq start_ldr /* If 0, we don't need to relocate */
/* Move this code somewhere safe. This is max(load + size, end) */
GETSYM(r4, start)
mr r3,r8 /* Get the load addr */
- cmp cr0,r4,r3 /* If we need to copy from the end, do so */
+ cmpw cr0,r4,r3 /* If we need to copy from the end, do so */
bgt do_relocate_from_end
do_relocate_from_start:
subi r4,r4,4
li r0,0
50: stwu r0,4(r3)
- cmp cr0,r3,r4
+ cmpw cr0,r3,r4
bne 50b
90: mr r9,r1 /* Save old stack pointer (in case it matters) */
lis r1,.stack@h
setup_750cx:
mfspr r10, SPRN_HID1
rlwinm r10,r10,4,28,31
- cmpi cr0,r10,7
- cmpi cr1,r10,9
- cmpi cr2,r10,11
+ cmpwi cr0,r10,7
+ cmpwi cr1,r10,9
+ cmpwi cr2,r10,11
cror 4*cr0+eq,4*cr0+eq,4*cr1+eq
cror 4*cr0+eq,4*cr0+eq,4*cr2+eq
bnelr
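
The rlwinm here is field extraction, which reads more easily once expanded:

/* rlwinm rD,rS,SH,MB,ME = rotlw(rS,SH) & mask(MB..ME), IBM bit order
 * (bit 0 = MSB).  Rotating HID1 left by 4 and keeping bits 28-31
 * drops its top nibble - presumably the PLL-config field, given the
 * setup_750cx label - into the low 4 bits for the compares above. */
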
/* Now deal with CPU type dependent registers */
mfspr r3,PVR
srwi r3,r3,16
- cmpli cr0,r3,0x8000 /* 7450 */
- cmpli cr1,r3,0x000c /* 7400 */
- cmpli cr2,r3,0x800c /* 7410 */
- cmpli cr3,r3,0x8001 /* 7455 */
- cmpli cr4,r3,0x8002 /* 7457 */
- cmpli cr5,r3,0x7000 /* 750FX */
+ cmplwi cr0,r3,0x8000 /* 7450 */
+ cmplwi cr1,r3,0x000c /* 7400 */
+ cmplwi cr2,r3,0x800c /* 7410 */
+ cmplwi cr3,r3,0x8001 /* 7455 */
+ cmplwi cr4,r3,0x8002 /* 7457 */
+ cmplwi cr5,r3,0x7000 /* 750FX */
/* cr1 is 7400 || 7410 */
cror 4*cr1+eq,4*cr1+eq,4*cr2+eq
/* cr0 is 74xx */
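
These have to remain logical (unsigned) compares, not merely gain the w suffix:

/* cmplwi zero-extends its 16-bit immediate, so 0x8000 compares as
 * 0x00008000.  The 0x8000-series PVR values do not fit a signed
 * 16-bit immediate at all - a cmpwi here could never match. */
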
/* If rev 2.x, backup HID2 */
mfspr r3,PVR
andi. r3,r3,0xff00
- cmpi cr0,r3,0x0200
+ cmpwi cr0,r3,0x0200
bne 1f
mfspr r4,SPRN_HID2
stw r4,CS_HID2(r5)
/* Now deal with CPU type dependent registers */
mfspr r3,PVR
srwi r3,r3,16
- cmpli cr0,r3,0x8000 /* 7450 */
- cmpli cr1,r3,0x000c /* 7400 */
- cmpli cr2,r3,0x800c /* 7410 */
- cmpli cr3,r3,0x8001 /* 7455 */
- cmpli cr4,r3,0x8002 /* 7457 */
- cmpli cr5,r3,0x7000 /* 750FX */
+ cmplwi cr0,r3,0x8000 /* 7450 */
+ cmplwi cr1,r3,0x000c /* 7400 */
+ cmplwi cr2,r3,0x800c /* 7410 */
+ cmplwi cr3,r3,0x8001 /* 7455 */
+ cmplwi cr4,r3,0x8002 /* 7457 */
+ cmplwi cr5,r3,0x7000 /* 750FX */
/* cr1 is 7400 || 7410 */
cror 4*cr1+eq,4*cr1+eq,4*cr2+eq
/* cr0 is 74xx */
/* If rev 2.x, restore HID2 with low voltage bit cleared */
mfspr r3,PVR
andi. r3,r3,0xff00
- cmpi cr0,r3,0x0200
+ cmpwi cr0,r3,0x0200
bne 4f
lwz r4,CS_HID2(r5)
rlwinm r4,r4,0,19,17
mftbl r5
3: mftbl r6
sub r6,r6,r5
- cmpli cr0,r6,10000
+ cmplwi cr0,r6,10000
ble 3b
/* Setup final PLL */
mtspr SPRN_HID1,r4
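
The numeric-label loop above it is a raw tick delay:

/* Elapsed = TBL_now - TBL_start; the unsigned subtract is wrap-safe.
 * The ~10000-tick spin presumably gives the newly programmed PLL
 * time to lock before HID1 finally switches over to it. */
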
andi. r11,r11,_TIF_SYSCALL_TRACE
bne- syscall_dotrace
syscall_dotrace_cont:
- cmpli 0,r0,NR_syscalls
+ cmplwi 0,r0,NR_syscalls
lis r10,sys_call_table@h
ori r10,r10,sys_call_table@l
slwi r0,r0,2
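
The unsigned bound check matters because r0 arrives straight from user space: cmplwi rejects both too-large and "negative" syscall numbers in one test (the conditional branch on it sits just past this hunk). A sketch of the continuation, assuming the usual table of 4-byte handler pointers (not the verbatim source):

lwzx r10,r10,r0 /* r10 = sys_call_table[index], index scaled by slwi */
mtlr r10
blrl /* call the handler; return value lands in r3 */
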
#endif
mr r6,r3
li r11,-_LAST_ERRNO
- cmpl 0,r3,r11
+ cmplw 0,r3,r11
rlwinm r12,r1,0,0,18 /* current_thread_info() */
blt+ 30f
lwz r11,TI_LOCAL_FLAGS(r12)
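
Two idioms packed into three lines here:

/* Error returns occupy the top of the unsigned range: an error is
 * (u32)r3 >= (u32)-_LAST_ERRNO, which is why this is cmplw, and
 * blt+ predicts the common success path.  rlwinm r12,r1,0,0,18
 * clears the low 13 bits of the stack pointer, yielding
 * current_thread_info() at the base of the 8KB kernel stack. */
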
tophys(r6,0) /* get __pa constant */
addis r3,r6,last_task_used_math@ha
lwz r4,last_task_used_math@l(r3)
- cmpi 0,r4,0
+ cmpwi 0,r4,0
beq 1f
add r4,r4,r6
addi r4,r4,THREAD /* want last_task_used_math->thread */
tophys(r6,0)
addis r3,r6,last_task_used_altivec@ha
lwz r4,last_task_used_altivec@l(r3)
- cmpi 0,r4,0
+ cmpwi 0,r4,0
beq 1f
add r4,r4,r6
addi r4,r4,THREAD /* want THREAD of last_task_used_altivec */
SYNC
MTMSRD(r5) /* enable use of AltiVec now */
isync
- cmpi 0,r3,0
+ cmpwi 0,r3,0
beqlr- /* if no previous owner, done */
addi r3,r3,THREAD /* want THREAD of task */
lwz r5,PT_REGS(r3)
- cmpi 0,r5,0
+ cmpwi 0,r5,0
SAVE_32VR(0, r4, r3)
mfvscr vr0
li r4,THREAD_VSCR
MTMSRD(r5) /* enable use of fpu now */
SYNC_601
isync
- cmpi 0,r3,0
+ cmpwi 0,r3,0
beqlr- /* if no previous owner, done */
addi r3,r3,THREAD /* want THREAD of task */
lwz r5,PT_REGS(r3)
- cmpi 0,r5,0
+ cmpwi 0,r5,0
SAVE_32FPRS(0, r3)
mffs fr0
stfd fr0,THREAD_FPSCR-4(r3)
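
Both the AltiVec and FPU hunks above are the lazy-switch pattern: a unit's registers are saved only when a different task first touches it. last_task_used_math and last_task_used_altivec are consulted through physical addresses (hence tophys() and the add of the __pa offset), and a zero owner pointer means there is nothing to save, so the beqlr- returns immediately.
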
#ifndef CONFIG_PPC64BRIDGE
mfspr r9,PVR
rlwinm r9,r9,16,16,31 /* r9 = 1 for 601, 4 for 604 */
- cmpi 0,r9,1
+ cmpwi 0,r9,1
bne 4f
ori r11,r11,4 /* set up BAT registers for 601 */
li r8,0x7f /* valid, block length = 8MB */
lwz r8,4(r8)
mfspr r9,PVR
rlwinm r9,r9,16,16,31 /* r9 = 1 for 601, 4 for 604 */
- cmpi 0,r9,1
+ cmpwi 0,r9,1
beq 1f
mtspr DBAT3L,r8
mtspr DBAT3U,r11
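
The beq 1f is not an omission: the 601 has four unified BATs and no separate data BATs, so the DBAT3 writes are skipped on that CPU.
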
/* Now check if user or arch enabled NAP mode */
lis r4,powersave_nap@ha
lwz r4,powersave_nap@l(r4)
- cmpi 0,r4,0
+ cmpwi 0,r4,0
beq 1f
lis r3,HID0_NAP@h
1:
END_FTR_SECTION_IFSET(CPU_FTR_CAN_NAP)
- cmpi 0,r3,0
+ cmpwi 0,r3,0
beqlr
/* Clear MSR:EE */
/* Go to low speed mode on some 750FX */
lis r4,powersave_lowspeed@ha
lwz r4,powersave_lowspeed@l(r4)
- cmpi 0,r4,0
+ cmpwi 0,r4,0
beq 1f
mfspr r4,SPRN_HID1
oris r4,r4,0x0001
/* Now check if user or arch enabled NAP mode */
lis r4,powersave_nap@ha
lwz r4,powersave_nap@l(r4)
- cmpi 0,r4,0
+ cmpwi 0,r4,0
beqlr
/* Clear MSR:EE */
mtmsr r0
/* If switching to PLL1, disable HID0:BTIC */
- cmpli cr0,r3,0
+ cmplwi cr0,r3,0
beq 1f
mfspr r5,HID0
rlwinm r5,r5,0,27,25
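
The rlwinm with MB > ME is a wrap-around mask, the standard single-bit clear:

/* mask(27..25) wraps: bits 27-31 and 0-25 are kept, so only bit 26
 * (0x20 = HID0_BTIC) is cleared.  The PLL0 path below sets the
 * same bit back with ori. */
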
stw r4,nap_save_hid1@l(r6)
/* If switching to PLL0, enable HID0:BTIC */
- cmpli cr0,r3,0
+ cmplwi cr0,r3,0
bne 1f
mfspr r5,HID0
ori r5,r5,HID0_BTIC
ori r9,r9,mmu_hash_lock@l
tophys(r9,r9)
10: lwarx r7,0,r9
- cmpi 0,r7,0
+ cmpwi 0,r7,0
bne- 10b
stwcx. r8,0,r9
bne- 10b
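
This is the canonical reservation-based lock acquire:

/* lwarx loads the lock word and sets a reservation; if the lock is
 * held, spin.  stwcx. stores only if the reservation survived - any
 * intervening store clears CR0.eq and we retry from the lwarx. */
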
ori r9,r9,mmu_hash_lock@l
tophys(r9,r9)
10: lwarx r7,0,r9
- cmpi 0,r7,0
+ cmpwi 0,r7,0
bne- 10b
stwcx. r8,0,r9
bne- 10b
#else
mfspr r3,PVR
rlwinm r3,r3,16,16,31
- cmpi 0,r3,1
+ cmpwi 0,r3,1
beqlr /* for 601, do nothing */
/* 603/604 processor - use invalidate-all bit in HID0 */
mfspr r3,HID0
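
As the comment says, the 603/604-class parts flash-invalidate through HID0's invalidate-all bits (the read-modify-write continues past this hunk); the 601 returns early above because it has no equivalent HID0 mechanism.
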
li r4,0 /* new sp (unused) */
li r0,__NR_clone
sc
- cmpi 0,r3,0 /* parent or child? */
+ cmpwi 0,r3,0 /* parent or child? */
bne 1f /* return if parent */
li r0,0 /* make top-level stack frame */
stwu r0,-16(r1)
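
The sc returns in both tasks: the parent receives the child's pid in r3 and takes the bne, while the child sees r3 == 0 and falls through to build a fresh top-level stack frame of its own.
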
adde r0,r0,r5 /* bdnz has ~zero overhead, so it should be unnecessary to unroll this loop */
bdnz 2b
andi. r4,r4,3
-3: cmpi 0,r4,2
+3: cmpwi 0,r4,2
blt+ 4f
lhz r5,4(r3)
addi r3,r3,2
subi r4,r4,2
adde r0,r0,r5
-4: cmpi 0,r4,1
+4: cmpwi 0,r4,1
bne+ 5f
lbz r5,4(r3)
slwi r5,r5,8 /* Upper byte of word */
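
The checksum loops lean on the carry chain: adde adds with both carry in and carry out, threading the one's-complement accumulation through the whole word loop.

/* Tail handling: fold a leftover halfword first, then a leftover
 * byte.  Big-endian folding puts a lone byte in the upper half of
 * its halfword, hence the slwi 8 before it is folded in. */
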
adde r0,r0,r9
bdnz 82b
13: andi. r5,r5,3
-3: cmpi 0,r5,2
+3: cmpwi 0,r5,2
blt+ 4f
83: lhz r6,4(r3)
addi r3,r3,2
93: sth r6,4(r4)
addi r4,r4,2
adde r0,r0,r6
-4: cmpi 0,r5,1
+4: cmpwi 0,r5,1
bne+ 5f
84: lbz r6,4(r3)
94: stb r6,4(r4)
97: stbu r6,1(r4)
bdnz 97b
src_error:
- cmpi 0,r7,0
+ cmpwi 0,r7,0
beq 1f
li r6,-EFAULT
stw r6,0(r7)
blr
dst_error:
- cmpi 0,r8,0
+ cmpwi 0,r8,0
beq 1f
li r6,-EFAULT
stw r6,0(r8)
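
These look like the error tails of a csum-and-copy routine: r7 and r8 carry separate source and destination error pointers, either of which may be NULL, hence the cmpwi against 0 before storing -EFAULT through them.
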
__asm__ __volatile__("\n\
1: lwarx %0,0,%2 \n\
- cmpi 0,%0,0 \n\
+ cmpwi 0,%0,0 \n\
li %0,0 \n\
bne- 2f \n\
addi %0,%0,1 \n\