#include
#include
#include
#include
#include
#include
#include
#include

/*
 * Structure for storing CPU registers on the stack.
 */
#define SL_SP		0
#define SL_PC		(SL_SP + 0x4)
#define SL_MSR		(SL_SP + 0x8)
#define SL_SDR1		(SL_SP + 0xc)
#define SL_SPRG0	(SL_SP + 0x10)	/* 4 sprg's */
#define SL_DBAT0	(SL_SP + 0x20)
#define SL_IBAT0	(SL_SP + 0x28)
#define SL_DBAT1	(SL_SP + 0x30)
#define SL_IBAT1	(SL_SP + 0x38)
#define SL_DBAT2	(SL_SP + 0x40)
#define SL_IBAT2	(SL_SP + 0x48)
#define SL_DBAT3	(SL_SP + 0x50)
#define SL_IBAT3	(SL_SP + 0x58)
#define SL_TB		(SL_SP + 0x60)
#define SL_R2		(SL_SP + 0x68)
#define SL_CR		(SL_SP + 0x6c)
#define SL_R12		(SL_SP + 0x70)	/* r12 to r31 */
#define SL_SIZE		(SL_R12 + 80)

#if 0
	.section .rodata
ptr_debug:
	.string	"ptr %x\n"
#endif

	.section .text
	.align	5

_GLOBAL(do_swsusp_lowlevel)
	mflr	r0			/* save return address into r0 */
	cmpwi	0,r3,0
	bc	4,2,.L3627		/* bne: r3 != 0 selects the resume path */
	bl	do_swsusp2_suspend_1
	stw	r0,SL_PC(r1)		/* save return address at r1 + 4 */
	stwu	r1,-SL_SIZE(r1)		/* push save area; old r1 stored at new r1 */
	mfcr	r0			/* save the condition register */
	stw	r0,SL_CR(r1)
	stw	r2,SL_R2(r1)		/* r2 */
	stmw	r12,SL_R12(r1)		/* r12 to r31 */

	/* Save MSR & SDR1 */
	mfmsr	r4
	stw	r4,SL_MSR(r1)
	mfsdr1	r4
	stw	r4,SL_SDR1(r1)

	/* Get a stable timebase and save it */
1:	mftbu	r4
	stw	r4,SL_TB(r1)
	mftb	r5
	stw	r5,SL_TB+4(r1)
	mftbu	r3
	cmpw	r3,r4
	bne	1b

	/* Save SPRGs */
	mfsprg	r4,0
	stw	r4,SL_SPRG0(r1)
	mfsprg	r4,1
	stw	r4,SL_SPRG0+4(r1)
	mfsprg	r4,2
	stw	r4,SL_SPRG0+8(r1)
	mfsprg	r4,3
	stw	r4,SL_SPRG0+12(r1)

	/* Save BATs */
	mfdbatu	r4,0
	stw	r4,SL_DBAT0(r1)
	mfdbatl	r4,0
	stw	r4,SL_DBAT0+4(r1)
	mfdbatu	r4,1
	stw	r4,SL_DBAT1(r1)
	mfdbatl	r4,1
	stw	r4,SL_DBAT1+4(r1)
	mfdbatu	r4,2
	stw	r4,SL_DBAT2(r1)
	mfdbatl	r4,2
	stw	r4,SL_DBAT2+4(r1)
	mfdbatu	r4,3
	stw	r4,SL_DBAT3(r1)
	mfdbatl	r4,3
	stw	r4,SL_DBAT3+4(r1)
	mfibatu	r4,0
	stw	r4,SL_IBAT0(r1)
	mfibatl	r4,0
	stw	r4,SL_IBAT0+4(r1)
	mfibatu	r4,1
	stw	r4,SL_IBAT1(r1)
	mfibatl	r4,1
	stw	r4,SL_IBAT1+4(r1)
	mfibatu	r4,2
	stw	r4,SL_IBAT2(r1)
	mfibatl	r4,2
	stw	r4,SL_IBAT2+4(r1)
	mfibatu	r4,3
	stw	r4,SL_IBAT3(r1)
	mfibatl	r4,3
	stw	r4,SL_IBAT3+4(r1)

#if 0
	/* TEST_CODE */
	lis	r4,0x1234
	ori	r4,r4,0x56
	stw	r4,-(SL_SIZE + 4)(r1)
#endif

	/* get r1 physical ptr */
	tophys(r5,r1)
	addi	r5,r5,SL_PC

	/* save storage ptr */
	lis	r3,pm_sleep_storage@ha
	addi	r3,r3,pm_sleep_storage@l
	stw	r5,0(r3)

#if 0
	/* printf ptr */
	lis	r3,ptr_debug@ha
	la	r3,ptr_debug@l(r3)
	bl	printk
#endif

	/* Back up various CPU config stuff */
	bl	__save_cpu_setup

	bl	do_swsusp2_suspend_2
	b	restore_stack

.L3627:
	bl	do_swsusp2_resume_1

	/* Stash swsusp_action, swsusp_debug_state and console_printk in
	 * nosave storage, then set up the orig/copy range walk from
	 * pagedir_resume. */
	lis	r9,swsusp_action@ha
	lwz	r0,swsusp_action@l(r9)
	lis	r11,swsusp_debug_state@ha
	lis	r9,state1@ha
	stw	r0,state1@l(r9)
	lwz	r8,swsusp_debug_state@l(r11)
	lis	r10,console_printk@ha
	lis	r9,state2@ha
	lis	r11,pagedir_resume@ha
	stw	r8,state2@l(r9)
	la	r11,pagedir_resume@l(r11)
	lwz	r0,console_printk@l(r10)
	lwz	r5,12(r11)
	lis	r9,state3@ha
	stw	r0,state3@l(r9)
	lwz	r10,0(r5)
	lwz	r4,56(r11)
	lis	r9,origoffset@ha
	stw	r10,origoffset@l(r9)
	lwz	r0,0(r4)
	lis	r11,copyoffset@ha
	stw	r0,copyoffset@l(r11)
	lwz	r10,origoffset@l(r9)
	lwz	r8,copyoffset@l(r11)
	slwi	r9,r10,2
	slwi	r11,r8,2
	add	r9,r9,r10
	add	r11,r11,r8
	lis	r0,0xcccc
	ori	r0,r0,52429
	slwi	r9,r9,3
	slwi	r11,r11,3
	mullw	r11,r11,r0
	mullw	r9,r9,r0
	slwi	r11,r11,9
	slwi	r9,r9,9
	cmpwi	0,r5,0
	addis	r9,r9,0xc000
	addis	r11,r11,0xc000
	lis	r7,origrange@ha
	lis	r6,copyrange@ha
	lis	r10,origpage@ha
	lis	r8,copypage@ha
	lis	r24,origrange@ha
	lis	r25,copyrange@ha
	lis	r12,origoffset@ha
	lis	r3,copyoffset@ha
	stw	r9,origpage@l(r10)
	stw	r11,copypage@l(r8)
	stw	r5,origrange@l(r7)
	stw	r4,copyrange@l(r6)
	bc	12,2,.L3646		/* beq: no ranges to copy */
	lis	r4,0xcccc
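
	/*
	 * The compiler-generated loop below walks the orig/copy range
	 * lists set up above from pagedir_resume and, for each page,
	 * copies 1024 words (one 4 KB page) from the copy page back over
	 * the original page.  A range appears to be { start, end, next }
	 * at offsets 0/4/8, and page addresses are recomputed as
	 * (offset << PAGE_SHIFT) + 0xc0000000.
	 */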
	lis	r28,loop@ha
	lis	r26,origoffset@ha
	lis	r29,origrange@ha
	lis	r30,origpage@ha
	ori	r4,r4,52429		/* r4 = 0xcccccccd */
	lis	r27,copyoffset@ha
	lis	r31,copypage@ha
.L3632:
	/* copy one page: 1024 words from copypage to origpage */
	li	r0,0
	stw	r0,loop@l(r28)
	lwz	r9,loop@l(r28)
	cmplwi	0,r9,1023
	bc	12,1,.L3637		/* bgt */
	lis	r7,loop@ha
	lis	r5,origpage@ha
	lis	r6,copypage@ha
.L3635:
	lwz	r8,loop@l(r7)
	lwz	r9,loop@l(r7)
	lwz	r11,copypage@l(r6)
	slwi	r9,r9,2
	lwzx	r0,r9,r11
	lwz	r10,origpage@l(r5)
	slwi	r8,r8,2
	stwx	r0,r8,r10
	lwz	r9,loop@l(r7)
	addi	r9,r9,1
	stw	r9,loop@l(r7)
	lwz	r0,loop@l(r7)
	cmplwi	0,r0,1023
	bc	4,1,.L3635		/* ble: next word */
.L3637:
	/* advance the original side to the next page or range */
	lwz	r11,origrange@l(r29)
	lwz	r9,origoffset@l(r26)
	lwz	r0,4(r11)
	cmplw	0,r9,r0
	bc	4,0,.L3638		/* bge: past this range */
	lwz	r9,origoffset@l(r12)
	lwz	r11,origpage@l(r30)
	addi	r9,r9,1
	addi	r11,r11,4096
	stw	r9,origoffset@l(r12)
	stw	r11,origpage@l(r30)
	b	.L3639
.L3638:
	lwz	r9,8(r11)
	cmpwi	0,r9,0
	stw	r9,origrange@l(r24)
	bc	12,2,.L3639		/* beq: no more orig ranges */
	lwz	r9,0(r9)
	stw	r9,origoffset@l(r12)
	lwz	r0,origoffset@l(r12)
	slwi	r9,r0,2
	add	r9,r9,r0
	slwi	r9,r9,3
	mullw	r9,r9,r4
	slwi	r9,r9,9
	addis	r9,r9,0xc000
	stw	r9,origpage@l(r30)
.L3639:
	/* advance the copy side to the next page or range */
	lis	r9,copyrange@ha
	lwz	r9,copyrange@l(r9)
	lwz	r11,copyoffset@l(r27)
	lwz	r0,4(r9)
	cmplw	0,r11,r0
	bc	4,0,.L3642		/* bge: past this range */
	lwz	r9,copyoffset@l(r3)
	lwz	r11,copypage@l(r31)
	addi	r9,r9,1
	addi	r11,r11,4096
	stw	r9,copyoffset@l(r3)
	stw	r11,copypage@l(r31)
	b	.L3630
.L3642:
	lwz	r9,8(r9)
	cmpwi	0,r9,0
	stw	r9,copyrange@l(r25)
	bc	12,2,.L3630		/* beq: no more copy ranges */
	lwz	r9,0(r9)
	stw	r9,copyoffset@l(r3)
	lwz	r0,copyoffset@l(r3)
	slwi	r9,r0,2
	add	r9,r9,r0
	slwi	r9,r9,3
	mullw	r9,r9,r4
	slwi	r9,r9,9
	addis	r9,r9,0xc000
	stw	r9,copypage@l(r31)
.L3630:
	lwz	r0,origrange@l(r29)
	cmpwi	0,r0,0
	bc	4,2,.L3632		/* bne: more pages to copy */
.L3646:
	/* put back the globals we stashed before the copy */
	lis	r9,state1@ha
	lwz	r7,state1@l(r9)
	lis	r11,state2@ha
	lwz	r8,state2@l(r11)
	lis	r9,state3@ha
	lwz	r0,state3@l(r9)
	lis	r11,swsusp_action@ha
	lis	r9,swsusp_debug_state@ha
	lis	r10,console_printk@ha
	stw	r7,swsusp_action@l(r11)
	stw	r8,swsusp_debug_state@l(r9)
	stw	r0,console_printk@l(r10)

#if 0
//	bl	pm_turn_off_mmu
//#else
//	mfmsr	r3
//	andi.	r0,r3,MSR_DR|MSR_IR	/* MMU enabled? */
	beqlr
	andc	r3,r3,r0
	mtspr	SRR0,r4
	mtspr	SRR1,r3
	sync
#endif

#if 0
	/* Turn off data relocation. */
	mfmsr	r3			/* Save MSR in r3 */
	rlwinm	r3,r3,0,28,26		/* Turn off DR bit */
	sync
	mtmsr	r3
	isync
#endif

#if 0
	/* force supervisor */
	mfmsr	r4
	li	r3,MSR_PR		/* ensure supervisor! */
	ori	r3,r3,MSR_IR|MSR_DR
	andc	r4,r4,r3
	mtmsr	r4
	isync
#endif

#if 0
	/* MMU off */
	li	r3,0
	mtspr	IBAT0U,r3
	mtspr	IBAT0L,r3
	mtspr	IBAT1U,r3
	mtspr	IBAT1L,r3
	mtspr	IBAT2U,r3
	mtspr	IBAT2L,r3
	mtspr	IBAT3U,r3
	mtspr	IBAT3L,r3
	mtspr	DBAT0U,r3
	mtspr	DBAT0L,r3
	mtspr	DBAT1U,r3
	mtspr	DBAT1L,r3
	mtspr	DBAT2U,r3
	mtspr	DBAT2L,r3
	mtspr	DBAT3U,r3
	mtspr	DBAT3L,r3
#endif

#if 0
	/* Make sure HID0 no longer contains any sleep bit */
	mfspr	r3,HID0
	rlwinm	r3,r3,0,11,7		/* clear SLEEP, NAP, DOZE bits */
	mtspr	HID0,r3
	sync
	isync
#endif

#if 0
	/* Won't that cause problems on CPUs that don't support it? */
	lis	r3,0
	mtspr	SPRN_MMCR0,r3
#endif
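
	/*
	 * The ori/xori pair below clears MSR_EE and MSR_IP regardless of
	 * their previous state: ori forces both bits on, xori then flips
	 * them back off.  External interrupts are left disabled and the
	 * exception prefix points at low memory while the MMU state is
	 * rebuilt.
	 */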
	/* sanitize MSR */
	mfmsr	r3
	ori	r3,r3,MSR_EE|MSR_IP
	xori	r3,r3,MSR_EE|MSR_IP
	sync
	isync
	mtmsr	r3
	sync
	isync

	/* Recover sleep storage */
	lis	r3,pm_sleep_storage@ha
	addi	r3,r3,pm_sleep_storage@l
	tophys(r3,r3)
	lwz	r1,0(r3)

	/* Invalidate & enable the L1 cache; we don't care about
	 * whatever the ROM may have tried to write to memory.
	 */
	bl	__inval_enable_L1

	/* Restore the kernel's segment registers before
	 * we do any r1 memory access, as we are not sure they
	 * are in a sane state above the first 256Mb region.
	 */
	li	r0,16			/* load up segment register values */
	mtctr	r0			/* for context 0 */
	lis	r3,0x2000		/* Ku = 1, VSID = 0 */
	li	r4,0
3:	mtsrin	r3,r4
	addi	r3,r3,0x111		/* increment VSID */
	addis	r4,r4,0x1000		/* address of next segment */
	bdnz	3b
	sync
	isync

	subi	r1,r1,SL_PC

	/* Restore various CPU config stuff */
	bl	__restore_cpu_setup

	/* Restore the BATs and SDR1.  Then we can turn on the MMU. */
	lwz	r4,SL_SDR1(r1)
	mtsdr1	r4
	lwz	r4,SL_SPRG0(r1)
	mtsprg	0,r4
	lwz	r4,SL_SPRG0+4(r1)
	mtsprg	1,r4
	lwz	r4,SL_SPRG0+8(r1)
	mtsprg	2,r4
	lwz	r4,SL_SPRG0+12(r1)
	mtsprg	3,r4
	lwz	r4,SL_DBAT0(r1)
	mtdbatu	0,r4
	lwz	r4,SL_DBAT0+4(r1)
	mtdbatl	0,r4
	lwz	r4,SL_DBAT1(r1)
	mtdbatu	1,r4
	lwz	r4,SL_DBAT1+4(r1)
	mtdbatl	1,r4
	lwz	r4,SL_DBAT2(r1)
	mtdbatu	2,r4
	lwz	r4,SL_DBAT2+4(r1)
	mtdbatl	2,r4
	lwz	r4,SL_DBAT3(r1)
	mtdbatu	3,r4
	lwz	r4,SL_DBAT3+4(r1)
	mtdbatl	3,r4
	lwz	r4,SL_IBAT0(r1)
	mtibatu	0,r4
	lwz	r4,SL_IBAT0+4(r1)
	mtibatl	0,r4
	lwz	r4,SL_IBAT1(r1)
	mtibatu	1,r4
	lwz	r4,SL_IBAT1+4(r1)
	mtibatl	1,r4
	lwz	r4,SL_IBAT2(r1)
	mtibatu	2,r4
	lwz	r4,SL_IBAT2+4(r1)
	mtibatl	2,r4
	lwz	r4,SL_IBAT3(r1)
	mtibatu	3,r4
	lwz	r4,SL_IBAT3+4(r1)
	mtibatl	3,r4

BEGIN_FTR_SECTION
	li	r4,0
	mtspr	SPRN_DBAT4U,r4
	mtspr	SPRN_DBAT4L,r4
	mtspr	SPRN_DBAT5U,r4
	mtspr	SPRN_DBAT5L,r4
	mtspr	SPRN_DBAT6U,r4
	mtspr	SPRN_DBAT6L,r4
	mtspr	SPRN_DBAT7U,r4
	mtspr	SPRN_DBAT7L,r4
	mtspr	SPRN_IBAT4U,r4
	mtspr	SPRN_IBAT4L,r4
	mtspr	SPRN_IBAT5U,r4
	mtspr	SPRN_IBAT5L,r4
	mtspr	SPRN_IBAT6U,r4
	mtspr	SPRN_IBAT6L,r4
	mtspr	SPRN_IBAT7U,r4
	mtspr	SPRN_IBAT7L,r4
END_FTR_SECTION_IFSET(CPU_FTR_HAS_HIGH_BATS)

	/* Flush all TLBs */
	lis	r4,0x1000
1:	addic.	r4,r4,-0x1000
	tlbie	r4
	blt	1b
	sync

	/* restore the MSR and turn on the MMU */
	lwz	r3,SL_MSR(r1)
	bl	pm_turn_on_mmu

	/* get back the stack pointer */
	tovirt(r1,r1)

	/* Restore TB */
	li	r3,0
	mttbl	r3
	lwz	r3,SL_TB(r1)
	lwz	r4,SL_TB+4(r1)
	mttbu	r3
	mttbl	r4

	bl	do_swsusp2_resume_2

restore_stack:
	/* Restore the callee-saved registers and return */
	lwz	r0,SL_CR(r1)
	mtcr	r0
	lwz	r2,SL_R2(r1)
	lmw	r12,SL_R12(r1)
	addi	r1,r1,SL_SIZE
	lwz	r0,4(r1)
	mtlr	r0
	blr

pm_turn_on_mmu:
	mflr	r4
	tovirt(r4,r4)
	mtsrr0	r4
	mtsrr1	r3			/* return to caller via rfi with the saved MSR */
	sync
	isync
	rfi

pm_turn_off_mmu:
	mfmsr	r3
	andi.	r0,r3,MSR_DR|MSR_IR	/* MMU enabled? */
	beqlr
	andc	r3,r3,r0
	mtspr	SRR0,r4
	mtspr	SRR1,r3
	sync
	rfi

	.section ".data.nosave"
origrange:
	.long	0
copyrange:
	.long	0
origoffset:
	.long	0
copyoffset:
	.long	0
origpage:
	.long	0
copypage:
	.long	0
loop:
	.long	0
state1:
	.long	0
state2:
	.long	0
state3:
	.long	0
c_loops_per_jiffy_ref:
	.long	0
cpu_khz_ref:
	.long	0

	.section .data
	.balign	L1_CACHE_LINE_SIZE
pm_sleep_storage:
	.long	0
	.balign	L1_CACHE_LINE_SIZE,0

	.text