Commit d3cdfd2a15d87d040a5f553231b979fbc28b98e7
Committed by
Kevin Hilman
1 parent
cc1b602887
Exists in
master
and in
4 other branches
OMAP3: remove unused code from the ASM sleep code
Remove unused code: - macros, - variables, - unused semaphore locking API. This API shall be added back when needed, - infinite loops for debug. Tested on N900 and Beagleboard with full RET and OFF modes, using cpuidle and suspend. Signed-off-by: Jean Pihet <j-pihet@ti.com> Acked-by: Santosh Shilimkar <santosh.shilimkar@ti.com> Reviewed-by: Nishanth Menon <nm@ti.com> Tested-by: Nishanth Menon<nm@ti.com> Signed-off-by: Kevin Hilman <khilman@deeprootsystems.com>
Showing 2 changed files with 6 additions and 53 deletions Inline Diff
arch/arm/mach-omap2/pm.h
1 | /* | 1 | /* |
2 | * OMAP2/3 Power Management Routines | 2 | * OMAP2/3 Power Management Routines |
3 | * | 3 | * |
4 | * Copyright (C) 2008 Nokia Corporation | 4 | * Copyright (C) 2008 Nokia Corporation |
5 | * Jouni Hogander | 5 | * Jouni Hogander |
6 | * | 6 | * |
7 | * This program is free software; you can redistribute it and/or modify | 7 | * This program is free software; you can redistribute it and/or modify |
8 | * it under the terms of the GNU General Public License version 2 as | 8 | * it under the terms of the GNU General Public License version 2 as |
9 | * published by the Free Software Foundation. | 9 | * published by the Free Software Foundation. |
10 | */ | 10 | */ |
11 | #ifndef __ARCH_ARM_MACH_OMAP2_PM_H | 11 | #ifndef __ARCH_ARM_MACH_OMAP2_PM_H |
12 | #define __ARCH_ARM_MACH_OMAP2_PM_H | 12 | #define __ARCH_ARM_MACH_OMAP2_PM_H |
13 | 13 | ||
14 | #include <plat/powerdomain.h> | 14 | #include <plat/powerdomain.h> |
15 | 15 | ||
16 | extern void *omap3_secure_ram_storage; | 16 | extern void *omap3_secure_ram_storage; |
17 | extern void omap3_pm_off_mode_enable(int); | 17 | extern void omap3_pm_off_mode_enable(int); |
18 | extern void omap_sram_idle(void); | 18 | extern void omap_sram_idle(void); |
19 | extern int omap3_can_sleep(void); | 19 | extern int omap3_can_sleep(void); |
20 | extern int omap_set_pwrdm_state(struct powerdomain *pwrdm, u32 state); | 20 | extern int omap_set_pwrdm_state(struct powerdomain *pwrdm, u32 state); |
21 | extern int omap3_idle_init(void); | 21 | extern int omap3_idle_init(void); |
22 | 22 | ||
23 | struct cpuidle_params { | 23 | struct cpuidle_params { |
24 | u8 valid; | 24 | u8 valid; |
25 | u32 sleep_latency; | 25 | u32 sleep_latency; |
26 | u32 wake_latency; | 26 | u32 wake_latency; |
27 | u32 threshold; | 27 | u32 threshold; |
28 | }; | 28 | }; |
29 | 29 | ||
30 | #if defined(CONFIG_PM) && defined(CONFIG_CPU_IDLE) | 30 | #if defined(CONFIG_PM) && defined(CONFIG_CPU_IDLE) |
31 | extern void omap3_pm_init_cpuidle(struct cpuidle_params *cpuidle_board_params); | 31 | extern void omap3_pm_init_cpuidle(struct cpuidle_params *cpuidle_board_params); |
32 | #else | 32 | #else |
33 | static | 33 | static |
34 | inline void omap3_pm_init_cpuidle(struct cpuidle_params *cpuidle_board_params) | 34 | inline void omap3_pm_init_cpuidle(struct cpuidle_params *cpuidle_board_params) |
35 | { | 35 | { |
36 | } | 36 | } |
37 | #endif | 37 | #endif |
38 | 38 | ||
39 | extern int omap3_pm_get_suspend_state(struct powerdomain *pwrdm); | 39 | extern int omap3_pm_get_suspend_state(struct powerdomain *pwrdm); |
40 | extern int omap3_pm_set_suspend_state(struct powerdomain *pwrdm, int state); | 40 | extern int omap3_pm_set_suspend_state(struct powerdomain *pwrdm, int state); |
41 | 41 | ||
42 | extern u32 wakeup_timer_seconds; | 42 | extern u32 wakeup_timer_seconds; |
43 | extern u32 wakeup_timer_milliseconds; | 43 | extern u32 wakeup_timer_milliseconds; |
44 | extern struct omap_dm_timer *gptimer_wakeup; | 44 | extern struct omap_dm_timer *gptimer_wakeup; |
45 | 45 | ||
46 | #ifdef CONFIG_PM_DEBUG | 46 | #ifdef CONFIG_PM_DEBUG |
47 | extern void omap2_pm_dump(int mode, int resume, unsigned int us); | 47 | extern void omap2_pm_dump(int mode, int resume, unsigned int us); |
48 | extern void omap2_pm_wakeup_on_timer(u32 seconds, u32 milliseconds); | 48 | extern void omap2_pm_wakeup_on_timer(u32 seconds, u32 milliseconds); |
49 | extern int omap2_pm_debug; | 49 | extern int omap2_pm_debug; |
50 | extern u32 enable_off_mode; | 50 | extern u32 enable_off_mode; |
51 | extern u32 sleep_while_idle; | 51 | extern u32 sleep_while_idle; |
52 | #else | 52 | #else |
53 | #define omap2_pm_dump(mode, resume, us) do {} while (0); | 53 | #define omap2_pm_dump(mode, resume, us) do {} while (0); |
54 | #define omap2_pm_wakeup_on_timer(seconds, milliseconds) do {} while (0); | 54 | #define omap2_pm_wakeup_on_timer(seconds, milliseconds) do {} while (0); |
55 | #define omap2_pm_debug 0 | 55 | #define omap2_pm_debug 0 |
56 | #define enable_off_mode 0 | 56 | #define enable_off_mode 0 |
57 | #define sleep_while_idle 0 | 57 | #define sleep_while_idle 0 |
58 | #endif | 58 | #endif |
59 | 59 | ||
60 | #if defined(CONFIG_CPU_IDLE) | 60 | #if defined(CONFIG_CPU_IDLE) |
61 | extern void omap3_cpuidle_update_states(u32, u32); | 61 | extern void omap3_cpuidle_update_states(u32, u32); |
62 | #endif | 62 | #endif |
63 | 63 | ||
64 | #if defined(CONFIG_PM_DEBUG) && defined(CONFIG_DEBUG_FS) | 64 | #if defined(CONFIG_PM_DEBUG) && defined(CONFIG_DEBUG_FS) |
65 | extern void pm_dbg_update_time(struct powerdomain *pwrdm, int prev); | 65 | extern void pm_dbg_update_time(struct powerdomain *pwrdm, int prev); |
66 | extern int pm_dbg_regset_save(int reg_set); | 66 | extern int pm_dbg_regset_save(int reg_set); |
67 | extern int pm_dbg_regset_init(int reg_set); | 67 | extern int pm_dbg_regset_init(int reg_set); |
68 | #else | 68 | #else |
69 | #define pm_dbg_update_time(pwrdm, prev) do {} while (0); | 69 | #define pm_dbg_update_time(pwrdm, prev) do {} while (0); |
70 | #define pm_dbg_regset_save(reg_set) do {} while (0); | 70 | #define pm_dbg_regset_save(reg_set) do {} while (0); |
71 | #define pm_dbg_regset_init(reg_set) do {} while (0); | 71 | #define pm_dbg_regset_init(reg_set) do {} while (0); |
72 | #endif /* CONFIG_PM_DEBUG */ | 72 | #endif /* CONFIG_PM_DEBUG */ |
73 | 73 | ||
74 | extern void omap24xx_idle_loop_suspend(void); | 74 | extern void omap24xx_idle_loop_suspend(void); |
75 | 75 | ||
76 | extern void omap24xx_cpu_suspend(u32 dll_ctrl, void __iomem *sdrc_dlla_ctrl, | 76 | extern void omap24xx_cpu_suspend(u32 dll_ctrl, void __iomem *sdrc_dlla_ctrl, |
77 | void __iomem *sdrc_power); | 77 | void __iomem *sdrc_power); |
78 | extern void omap34xx_cpu_suspend(u32 *addr, int save_state); | 78 | extern void omap34xx_cpu_suspend(u32 *addr, int save_state); |
79 | extern void save_secure_ram_context(u32 *addr); | 79 | extern void save_secure_ram_context(u32 *addr); |
80 | extern void omap3_save_scratchpad_contents(void); | 80 | extern void omap3_save_scratchpad_contents(void); |
81 | 81 | ||
82 | extern unsigned int omap24xx_idle_loop_suspend_sz; | 82 | extern unsigned int omap24xx_idle_loop_suspend_sz; |
83 | extern unsigned int omap34xx_suspend_sz; | ||
84 | extern unsigned int save_secure_ram_context_sz; | 83 | extern unsigned int save_secure_ram_context_sz; |
85 | extern unsigned int omap24xx_cpu_suspend_sz; | 84 | extern unsigned int omap24xx_cpu_suspend_sz; |
86 | extern unsigned int omap34xx_cpu_suspend_sz; | 85 | extern unsigned int omap34xx_cpu_suspend_sz; |
87 | 86 | ||
88 | #define PM_RTA_ERRATUM_i608 (1 << 0) | 87 | #define PM_RTA_ERRATUM_i608 (1 << 0) |
89 | #define PM_SDRC_WAKEUP_ERRATUM_i583 (1 << 1) | 88 | #define PM_SDRC_WAKEUP_ERRATUM_i583 (1 << 1) |
90 | 89 | ||
91 | #if defined(CONFIG_PM) && defined(CONFIG_ARCH_OMAP3) | 90 | #if defined(CONFIG_PM) && defined(CONFIG_ARCH_OMAP3) |
92 | extern u16 pm34xx_errata; | 91 | extern u16 pm34xx_errata; |
93 | #define IS_PM34XX_ERRATUM(id) (pm34xx_errata & (id)) | 92 | #define IS_PM34XX_ERRATUM(id) (pm34xx_errata & (id)) |
94 | extern void enable_omap3630_toggle_l2_on_restore(void); | 93 | extern void enable_omap3630_toggle_l2_on_restore(void); |
95 | #else | 94 | #else |
96 | #define IS_PM34XX_ERRATUM(id) 0 | 95 | #define IS_PM34XX_ERRATUM(id) 0 |
97 | static inline void enable_omap3630_toggle_l2_on_restore(void) { } | 96 | static inline void enable_omap3630_toggle_l2_on_restore(void) { } |
98 | #endif /* defined(CONFIG_PM) && defined(CONFIG_ARCH_OMAP3) */ | 97 | #endif /* defined(CONFIG_PM) && defined(CONFIG_ARCH_OMAP3) */ |
99 | 98 | ||
100 | #endif | 99 | #endif |
101 | 100 |
arch/arm/mach-omap2/sleep34xx.S
1 | /* | 1 | /* |
2 | * linux/arch/arm/mach-omap2/sleep.S | 2 | * linux/arch/arm/mach-omap2/sleep.S |
3 | * | 3 | * |
4 | * (C) Copyright 2007 | 4 | * (C) Copyright 2007 |
5 | * Texas Instruments | 5 | * Texas Instruments |
6 | * Karthik Dasu <karthik-dp@ti.com> | 6 | * Karthik Dasu <karthik-dp@ti.com> |
7 | * | 7 | * |
8 | * (C) Copyright 2004 | 8 | * (C) Copyright 2004 |
9 | * Texas Instruments, <www.ti.com> | 9 | * Texas Instruments, <www.ti.com> |
10 | * Richard Woodruff <r-woodruff2@ti.com> | 10 | * Richard Woodruff <r-woodruff2@ti.com> |
11 | * | 11 | * |
12 | * This program is free software; you can redistribute it and/or | 12 | * This program is free software; you can redistribute it and/or |
13 | * modify it under the terms of the GNU General Public License as | 13 | * modify it under the terms of the GNU General Public License as |
14 | * published by the Free Software Foundation; either version 2 of | 14 | * published by the Free Software Foundation; either version 2 of |
15 | * the License, or (at your option) any later version. | 15 | * the License, or (at your option) any later version. |
16 | * | 16 | * |
17 | * This program is distributed in the hope that it will be useful, | 17 | * This program is distributed in the hope that it will be useful, |
18 | * but WITHOUT ANY WARRANTY; without even the implied warranty of | 18 | * but WITHOUT ANY WARRANTY; without even the implied warranty of |
19 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | 19 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
20 | * GNU General Public License for more details. | 20 | * GNU General Public License for more details. |
21 | * | 21 | * |
22 | * You should have received a copy of the GNU General Public License | 22 | * You should have received a copy of the GNU General Public License |
23 | * along with this program; if not, write to the Free Software | 23 | * along with this program; if not, write to the Free Software |
24 | * Foundation, Inc., 59 Temple Place, Suite 330, Boston, | 24 | * Foundation, Inc., 59 Temple Place, Suite 330, Boston, |
25 | * MA 02111-1307 USA | 25 | * MA 02111-1307 USA |
26 | */ | 26 | */ |
27 | #include <linux/linkage.h> | 27 | #include <linux/linkage.h> |
28 | #include <asm/assembler.h> | 28 | #include <asm/assembler.h> |
29 | #include <mach/io.h> | 29 | #include <mach/io.h> |
30 | 30 | ||
31 | #include "cm.h" | 31 | #include "cm.h" |
32 | #include "prm.h" | 32 | #include "prm.h" |
33 | #include "sdrc.h" | 33 | #include "sdrc.h" |
34 | #include "control.h" | 34 | #include "control.h" |
35 | 35 | ||
36 | #define SDRC_SCRATCHPAD_SEM_V 0xfa00291c | 36 | #define SDRC_SCRATCHPAD_SEM_V 0xfa00291c |
37 | 37 | ||
38 | #define PM_PREPWSTST_CORE_V OMAP34XX_PRM_REGADDR(CORE_MOD, \ | ||
39 | OMAP3430_PM_PREPWSTST) | ||
40 | #define PM_PREPWSTST_CORE_P 0x48306AE8 | 38 | #define PM_PREPWSTST_CORE_P 0x48306AE8 |
41 | #define PM_PREPWSTST_MPU_V OMAP34XX_PRM_REGADDR(MPU_MOD, \ | ||
42 | OMAP3430_PM_PREPWSTST) | ||
43 | #define PM_PWSTCTRL_MPU_P OMAP3430_PRM_BASE + MPU_MOD + OMAP2_PM_PWSTCTRL | 39 | #define PM_PWSTCTRL_MPU_P OMAP3430_PRM_BASE + MPU_MOD + OMAP2_PM_PWSTCTRL |
44 | #define CM_IDLEST1_CORE_V OMAP34XX_CM_REGADDR(CORE_MOD, CM_IDLEST1) | 40 | #define CM_IDLEST1_CORE_V OMAP34XX_CM_REGADDR(CORE_MOD, CM_IDLEST1) |
45 | #define CM_IDLEST_CKGEN_V OMAP34XX_CM_REGADDR(PLL_MOD, CM_IDLEST) | 41 | #define CM_IDLEST_CKGEN_V OMAP34XX_CM_REGADDR(PLL_MOD, CM_IDLEST) |
46 | #define SRAM_BASE_P 0x40200000 | 42 | #define SRAM_BASE_P 0x40200000 |
47 | #define CONTROL_STAT 0x480022F0 | 43 | #define CONTROL_STAT 0x480022F0 |
48 | #define CONTROL_MEM_RTA_CTRL (OMAP343X_CTRL_BASE\ | 44 | #define CONTROL_MEM_RTA_CTRL (OMAP343X_CTRL_BASE\ |
49 | + OMAP36XX_CONTROL_MEM_RTA_CTRL) | 45 | + OMAP36XX_CONTROL_MEM_RTA_CTRL) |
50 | #define SCRATCHPAD_MEM_OFFS 0x310 /* Move this as correct place is | 46 | #define SCRATCHPAD_MEM_OFFS 0x310 /* Move this as correct place is |
51 | * available */ | 47 | * available */ |
52 | #define SCRATCHPAD_BASE_P (OMAP343X_CTRL_BASE + OMAP343X_CONTROL_MEM_WKUP\ | 48 | #define SCRATCHPAD_BASE_P (OMAP343X_CTRL_BASE + OMAP343X_CONTROL_MEM_WKUP\ |
53 | + SCRATCHPAD_MEM_OFFS) | 49 | + SCRATCHPAD_MEM_OFFS) |
54 | #define SDRC_POWER_V OMAP34XX_SDRC_REGADDR(SDRC_POWER) | 50 | #define SDRC_POWER_V OMAP34XX_SDRC_REGADDR(SDRC_POWER) |
55 | #define SDRC_SYSCONFIG_P (OMAP343X_SDRC_BASE + SDRC_SYSCONFIG) | 51 | #define SDRC_SYSCONFIG_P (OMAP343X_SDRC_BASE + SDRC_SYSCONFIG) |
56 | #define SDRC_MR_0_P (OMAP343X_SDRC_BASE + SDRC_MR_0) | 52 | #define SDRC_MR_0_P (OMAP343X_SDRC_BASE + SDRC_MR_0) |
57 | #define SDRC_EMR2_0_P (OMAP343X_SDRC_BASE + SDRC_EMR2_0) | 53 | #define SDRC_EMR2_0_P (OMAP343X_SDRC_BASE + SDRC_EMR2_0) |
58 | #define SDRC_MANUAL_0_P (OMAP343X_SDRC_BASE + SDRC_MANUAL_0) | 54 | #define SDRC_MANUAL_0_P (OMAP343X_SDRC_BASE + SDRC_MANUAL_0) |
59 | #define SDRC_MR_1_P (OMAP343X_SDRC_BASE + SDRC_MR_1) | 55 | #define SDRC_MR_1_P (OMAP343X_SDRC_BASE + SDRC_MR_1) |
60 | #define SDRC_EMR2_1_P (OMAP343X_SDRC_BASE + SDRC_EMR2_1) | 56 | #define SDRC_EMR2_1_P (OMAP343X_SDRC_BASE + SDRC_EMR2_1) |
61 | #define SDRC_MANUAL_1_P (OMAP343X_SDRC_BASE + SDRC_MANUAL_1) | 57 | #define SDRC_MANUAL_1_P (OMAP343X_SDRC_BASE + SDRC_MANUAL_1) |
62 | #define SDRC_DLLA_STATUS_V OMAP34XX_SDRC_REGADDR(SDRC_DLLA_STATUS) | 58 | #define SDRC_DLLA_STATUS_V OMAP34XX_SDRC_REGADDR(SDRC_DLLA_STATUS) |
63 | #define SDRC_DLLA_CTRL_V OMAP34XX_SDRC_REGADDR(SDRC_DLLA_CTRL) | 59 | #define SDRC_DLLA_CTRL_V OMAP34XX_SDRC_REGADDR(SDRC_DLLA_CTRL) |
64 | 60 | ||
65 | .text | ||
66 | /* Function to acquire the semaphore in scratchpad */ | ||
67 | ENTRY(lock_scratchpad_sem) | ||
68 | stmfd sp!, {lr} @ save registers on stack | ||
69 | wait_sem: | ||
70 | mov r0,#1 | ||
71 | ldr r1, sdrc_scratchpad_sem | ||
72 | wait_loop: | ||
73 | ldr r2, [r1] @ load the lock value | ||
74 | cmp r2, r0 @ is the lock free ? | ||
75 | beq wait_loop @ not free... | ||
76 | swp r2, r0, [r1] @ semaphore free so lock it and proceed | ||
77 | cmp r2, r0 @ did we succeed ? | ||
78 | beq wait_sem @ no - try again | ||
79 | ldmfd sp!, {pc} @ restore regs and return | ||
80 | sdrc_scratchpad_sem: | ||
81 | .word SDRC_SCRATCHPAD_SEM_V | ||
82 | ENTRY(lock_scratchpad_sem_sz) | ||
83 | .word . - lock_scratchpad_sem | ||
84 | 61 | ||
85 | .text | 62 | /* |
86 | /* Function to release the scratchpad semaphore */ | 63 | * API functions |
87 | ENTRY(unlock_scratchpad_sem) | 64 | */ |
88 | stmfd sp!, {lr} @ save registers on stack | ||
89 | ldr r3, sdrc_scratchpad_sem | ||
90 | mov r2,#0 | ||
91 | str r2,[r3] | ||
92 | ldmfd sp!, {pc} @ restore regs and return | ||
93 | ENTRY(unlock_scratchpad_sem_sz) | ||
94 | .word . - unlock_scratchpad_sem | ||
95 | 65 | ||
96 | .text | 66 | .text |
97 | /* Function call to get the restore pointer for resume from OFF */ | 67 | /* Function call to get the restore pointer for resume from OFF */ |
98 | ENTRY(get_restore_pointer) | 68 | ENTRY(get_restore_pointer) |
99 | stmfd sp!, {lr} @ save registers on stack | 69 | stmfd sp!, {lr} @ save registers on stack |
100 | adr r0, restore | 70 | adr r0, restore |
101 | ldmfd sp!, {pc} @ restore regs and return | 71 | ldmfd sp!, {pc} @ restore regs and return |
102 | ENTRY(get_restore_pointer_sz) | 72 | ENTRY(get_restore_pointer_sz) |
103 | .word . - get_restore_pointer | 73 | .word . - get_restore_pointer |
104 | .text | 74 | .text |
105 | /* Function call to get the restore pointer for 3630 resume from OFF */ | 75 | /* Function call to get the restore pointer for 3630 resume from OFF */ |
106 | ENTRY(get_omap3630_restore_pointer) | 76 | ENTRY(get_omap3630_restore_pointer) |
107 | stmfd sp!, {lr} @ save registers on stack | 77 | stmfd sp!, {lr} @ save registers on stack |
108 | adr r0, restore_3630 | 78 | adr r0, restore_3630 |
109 | ldmfd sp!, {pc} @ restore regs and return | 79 | ldmfd sp!, {pc} @ restore regs and return |
110 | ENTRY(get_omap3630_restore_pointer_sz) | 80 | ENTRY(get_omap3630_restore_pointer_sz) |
111 | .word . - get_omap3630_restore_pointer | 81 | .word . - get_omap3630_restore_pointer |
112 | 82 | ||
113 | .text | 83 | .text |
114 | /* | 84 | /* |
115 | * L2 cache needs to be toggled for stable OFF mode functionality on 3630. | 85 | * L2 cache needs to be toggled for stable OFF mode functionality on 3630. |
116 | * This function sets up a flag that will allow for this toggling to take | 86 | * This function sets up a flag that will allow for this toggling to take |
117 | * place on 3630. Hopefully some version in the future may not need this | 87 | * place on 3630. Hopefully some version in the future may not need this |
118 | */ | 88 | */ |
119 | ENTRY(enable_omap3630_toggle_l2_on_restore) | 89 | ENTRY(enable_omap3630_toggle_l2_on_restore) |
120 | stmfd sp!, {lr} @ save registers on stack | 90 | stmfd sp!, {lr} @ save registers on stack |
121 | /* Setup so that we will disable and enable l2 */ | 91 | /* Setup so that we will disable and enable l2 */ |
122 | mov r1, #0x1 | 92 | mov r1, #0x1 |
123 | str r1, l2dis_3630 | 93 | str r1, l2dis_3630 |
124 | ldmfd sp!, {pc} @ restore regs and return | 94 | ldmfd sp!, {pc} @ restore regs and return |
125 | 95 | ||
126 | .text | 96 | .text |
127 | /* Function call to get the restore pointer for ES3 to resume from OFF */ | 97 | /* Function call to get the restore pointer for ES3 to resume from OFF */ |
128 | ENTRY(get_es3_restore_pointer) | 98 | ENTRY(get_es3_restore_pointer) |
129 | stmfd sp!, {lr} @ save registers on stack | 99 | stmfd sp!, {lr} @ save registers on stack |
130 | adr r0, restore_es3 | 100 | adr r0, restore_es3 |
131 | ldmfd sp!, {pc} @ restore regs and return | 101 | ldmfd sp!, {pc} @ restore regs and return |
132 | ENTRY(get_es3_restore_pointer_sz) | 102 | ENTRY(get_es3_restore_pointer_sz) |
133 | .word . - get_es3_restore_pointer | 103 | .word . - get_es3_restore_pointer |
134 | 104 | ||
135 | ENTRY(es3_sdrc_fix) | 105 | ENTRY(es3_sdrc_fix) |
136 | ldr r4, sdrc_syscfg @ get config addr | 106 | ldr r4, sdrc_syscfg @ get config addr |
137 | ldr r5, [r4] @ get value | 107 | ldr r5, [r4] @ get value |
138 | tst r5, #0x100 @ is part access blocked | 108 | tst r5, #0x100 @ is part access blocked |
139 | it eq | 109 | it eq |
140 | biceq r5, r5, #0x100 @ clear bit if set | 110 | biceq r5, r5, #0x100 @ clear bit if set |
141 | str r5, [r4] @ write back change | 111 | str r5, [r4] @ write back change |
142 | ldr r4, sdrc_mr_0 @ get config addr | 112 | ldr r4, sdrc_mr_0 @ get config addr |
143 | ldr r5, [r4] @ get value | 113 | ldr r5, [r4] @ get value |
144 | str r5, [r4] @ write back change | 114 | str r5, [r4] @ write back change |
145 | ldr r4, sdrc_emr2_0 @ get config addr | 115 | ldr r4, sdrc_emr2_0 @ get config addr |
146 | ldr r5, [r4] @ get value | 116 | ldr r5, [r4] @ get value |
147 | str r5, [r4] @ write back change | 117 | str r5, [r4] @ write back change |
148 | ldr r4, sdrc_manual_0 @ get config addr | 118 | ldr r4, sdrc_manual_0 @ get config addr |
149 | mov r5, #0x2 @ autorefresh command | 119 | mov r5, #0x2 @ autorefresh command |
150 | str r5, [r4] @ kick off refreshes | 120 | str r5, [r4] @ kick off refreshes |
151 | ldr r4, sdrc_mr_1 @ get config addr | 121 | ldr r4, sdrc_mr_1 @ get config addr |
152 | ldr r5, [r4] @ get value | 122 | ldr r5, [r4] @ get value |
153 | str r5, [r4] @ write back change | 123 | str r5, [r4] @ write back change |
154 | ldr r4, sdrc_emr2_1 @ get config addr | 124 | ldr r4, sdrc_emr2_1 @ get config addr |
155 | ldr r5, [r4] @ get value | 125 | ldr r5, [r4] @ get value |
156 | str r5, [r4] @ write back change | 126 | str r5, [r4] @ write back change |
157 | ldr r4, sdrc_manual_1 @ get config addr | 127 | ldr r4, sdrc_manual_1 @ get config addr |
158 | mov r5, #0x2 @ autorefresh command | 128 | mov r5, #0x2 @ autorefresh command |
159 | str r5, [r4] @ kick off refreshes | 129 | str r5, [r4] @ kick off refreshes |
160 | bx lr | 130 | bx lr |
161 | sdrc_syscfg: | 131 | sdrc_syscfg: |
162 | .word SDRC_SYSCONFIG_P | 132 | .word SDRC_SYSCONFIG_P |
163 | sdrc_mr_0: | 133 | sdrc_mr_0: |
164 | .word SDRC_MR_0_P | 134 | .word SDRC_MR_0_P |
165 | sdrc_emr2_0: | 135 | sdrc_emr2_0: |
166 | .word SDRC_EMR2_0_P | 136 | .word SDRC_EMR2_0_P |
167 | sdrc_manual_0: | 137 | sdrc_manual_0: |
168 | .word SDRC_MANUAL_0_P | 138 | .word SDRC_MANUAL_0_P |
169 | sdrc_mr_1: | 139 | sdrc_mr_1: |
170 | .word SDRC_MR_1_P | 140 | .word SDRC_MR_1_P |
171 | sdrc_emr2_1: | 141 | sdrc_emr2_1: |
172 | .word SDRC_EMR2_1_P | 142 | .word SDRC_EMR2_1_P |
173 | sdrc_manual_1: | 143 | sdrc_manual_1: |
174 | .word SDRC_MANUAL_1_P | 144 | .word SDRC_MANUAL_1_P |
175 | ENTRY(es3_sdrc_fix_sz) | 145 | ENTRY(es3_sdrc_fix_sz) |
176 | .word . - es3_sdrc_fix | 146 | .word . - es3_sdrc_fix |
177 | 147 | ||
178 | /* Function to call rom code to save secure ram context */ | 148 | /* Function to call rom code to save secure ram context */ |
179 | ENTRY(save_secure_ram_context) | 149 | ENTRY(save_secure_ram_context) |
180 | stmfd sp!, {r1-r12, lr} @ save registers on stack | 150 | stmfd sp!, {r1-r12, lr} @ save registers on stack |
181 | save_secure_ram_debug: | 151 | |
182 | /* b save_secure_ram_debug */ @ enable to debug save code | ||
183 | adr r3, api_params @ r3 points to parameters | 152 | adr r3, api_params @ r3 points to parameters |
184 | str r0, [r3,#0x4] @ r0 has sdram address | 153 | str r0, [r3,#0x4] @ r0 has sdram address |
185 | ldr r12, high_mask | 154 | ldr r12, high_mask |
186 | and r3, r3, r12 | 155 | and r3, r3, r12 |
187 | ldr r12, sram_phy_addr_mask | 156 | ldr r12, sram_phy_addr_mask |
188 | orr r3, r3, r12 | 157 | orr r3, r3, r12 |
189 | mov r0, #25 @ set service ID for PPA | 158 | mov r0, #25 @ set service ID for PPA |
190 | mov r12, r0 @ copy secure service ID in r12 | 159 | mov r12, r0 @ copy secure service ID in r12 |
191 | mov r1, #0 @ set task id for ROM code in r1 | 160 | mov r1, #0 @ set task id for ROM code in r1 |
192 | mov r2, #4 @ set some flags in r2, r6 | 161 | mov r2, #4 @ set some flags in r2, r6 |
193 | mov r6, #0xff | 162 | mov r6, #0xff |
194 | mcr p15, 0, r0, c7, c10, 4 @ data write barrier | 163 | mcr p15, 0, r0, c7, c10, 4 @ data write barrier |
195 | mcr p15, 0, r0, c7, c10, 5 @ data memory barrier | 164 | mcr p15, 0, r0, c7, c10, 5 @ data memory barrier |
196 | .word 0xE1600071 @ call SMI monitor (smi #1) | 165 | .word 0xE1600071 @ call SMI monitor (smi #1) |
197 | nop | 166 | nop |
198 | nop | 167 | nop |
199 | nop | 168 | nop |
200 | nop | 169 | nop |
201 | ldmfd sp!, {r1-r12, pc} | 170 | ldmfd sp!, {r1-r12, pc} |
202 | sram_phy_addr_mask: | 171 | sram_phy_addr_mask: |
203 | .word SRAM_BASE_P | 172 | .word SRAM_BASE_P |
204 | high_mask: | 173 | high_mask: |
205 | .word 0xffff | 174 | .word 0xffff |
206 | api_params: | 175 | api_params: |
207 | .word 0x4, 0x0, 0x0, 0x1, 0x1 | 176 | .word 0x4, 0x0, 0x0, 0x1, 0x1 |
208 | ENTRY(save_secure_ram_context_sz) | 177 | ENTRY(save_secure_ram_context_sz) |
209 | .word . - save_secure_ram_context | 178 | .word . - save_secure_ram_context |
210 | 179 | ||
211 | /* | 180 | /* |
212 | * Forces OMAP into idle state | 181 | * Forces OMAP into idle state |
213 | * | 182 | * |
214 | * omap34xx_suspend() - This bit of code just executes the WFI | 183 | * omap34xx_suspend() - This bit of code just executes the WFI |
215 | * for normal idles. | 184 | * for normal idles. |
216 | * | 185 | * |
217 | * Note: This code gets copied to internal SRAM at boot. When the OMAP | 186 | * Note: This code gets copied to internal SRAM at boot. When the OMAP |
218 | * wakes up it continues execution at the point it went to sleep. | 187 | * wakes up it continues execution at the point it went to sleep. |
219 | */ | 188 | */ |
220 | ENTRY(omap34xx_cpu_suspend) | 189 | ENTRY(omap34xx_cpu_suspend) |
221 | stmfd sp!, {r0-r12, lr} @ save registers on stack | 190 | stmfd sp!, {r0-r12, lr} @ save registers on stack |
222 | loop: | 191 | |
223 | /*b loop*/ @Enable to debug by stepping through code | ||
224 | /* r0 contains restore pointer in sdram */ | 192 | /* r0 contains restore pointer in sdram */ |
225 | /* r1 contains information about saving context */ | 193 | /* r1 contains information about saving context */ |
226 | ldr r4, sdrc_power @ read the SDRC_POWER register | 194 | ldr r4, sdrc_power @ read the SDRC_POWER register |
227 | ldr r5, [r4] @ read the contents of SDRC_POWER | 195 | ldr r5, [r4] @ read the contents of SDRC_POWER |
228 | orr r5, r5, #0x40 @ enable self refresh on idle req | 196 | orr r5, r5, #0x40 @ enable self refresh on idle req |
229 | str r5, [r4] @ write back to SDRC_POWER register | 197 | str r5, [r4] @ write back to SDRC_POWER register |
230 | 198 | ||
231 | cmp r1, #0x0 | 199 | cmp r1, #0x0 |
232 | /* If context save is required, do that and execute wfi */ | 200 | /* If context save is required, do that and execute wfi */ |
233 | bne save_context_wfi | 201 | bne save_context_wfi |
234 | /* Data memory barrier and Data sync barrier */ | 202 | /* Data memory barrier and Data sync barrier */ |
235 | mov r1, #0 | 203 | mov r1, #0 |
236 | mcr p15, 0, r1, c7, c10, 4 | 204 | mcr p15, 0, r1, c7, c10, 4 |
237 | mcr p15, 0, r1, c7, c10, 5 | 205 | mcr p15, 0, r1, c7, c10, 5 |
238 | 206 | ||
239 | wfi @ wait for interrupt | 207 | wfi @ wait for interrupt |
240 | 208 | ||
241 | nop | 209 | nop |
242 | nop | 210 | nop |
243 | nop | 211 | nop |
244 | nop | 212 | nop |
245 | nop | 213 | nop |
246 | nop | 214 | nop |
247 | nop | 215 | nop |
248 | nop | 216 | nop |
249 | nop | 217 | nop |
250 | nop | 218 | nop |
251 | bl wait_sdrc_ok | 219 | bl wait_sdrc_ok |
252 | 220 | ||
253 | ldmfd sp!, {r0-r12, pc} @ restore regs and return | 221 | ldmfd sp!, {r0-r12, pc} @ restore regs and return |
254 | restore_es3: | 222 | restore_es3: |
255 | /*b restore_es3*/ @ Enable to debug restore code | ||
256 | ldr r5, pm_prepwstst_core_p | 223 | ldr r5, pm_prepwstst_core_p |
257 | ldr r4, [r5] | 224 | ldr r4, [r5] |
258 | and r4, r4, #0x3 | 225 | and r4, r4, #0x3 |
259 | cmp r4, #0x0 @ Check if previous power state of CORE is OFF | 226 | cmp r4, #0x0 @ Check if previous power state of CORE is OFF |
260 | bne restore | 227 | bne restore |
261 | adr r0, es3_sdrc_fix | 228 | adr r0, es3_sdrc_fix |
262 | ldr r1, sram_base | 229 | ldr r1, sram_base |
263 | ldr r2, es3_sdrc_fix_sz | 230 | ldr r2, es3_sdrc_fix_sz |
264 | mov r2, r2, ror #2 | 231 | mov r2, r2, ror #2 |
265 | copy_to_sram: | 232 | copy_to_sram: |
266 | ldmia r0!, {r3} @ val = *src | 233 | ldmia r0!, {r3} @ val = *src |
267 | stmia r1!, {r3} @ *dst = val | 234 | stmia r1!, {r3} @ *dst = val |
268 | subs r2, r2, #0x1 @ num_words-- | 235 | subs r2, r2, #0x1 @ num_words-- |
269 | bne copy_to_sram | 236 | bne copy_to_sram |
270 | ldr r1, sram_base | 237 | ldr r1, sram_base |
271 | blx r1 | 238 | blx r1 |
272 | b restore | 239 | b restore |
273 | 240 | ||
274 | restore_3630: | 241 | restore_3630: |
275 | /*b restore_es3630*/ @ Enable to debug restore code | ||
276 | ldr r1, pm_prepwstst_core_p | 242 | ldr r1, pm_prepwstst_core_p |
277 | ldr r2, [r1] | 243 | ldr r2, [r1] |
278 | and r2, r2, #0x3 | 244 | and r2, r2, #0x3 |
279 | cmp r2, #0x0 @ Check if previous power state of CORE is OFF | 245 | cmp r2, #0x0 @ Check if previous power state of CORE is OFF |
280 | bne restore | 246 | bne restore |
281 | /* Disable RTA before giving control */ | 247 | /* Disable RTA before giving control */ |
282 | ldr r1, control_mem_rta | 248 | ldr r1, control_mem_rta |
283 | mov r2, #OMAP36XX_RTA_DISABLE | 249 | mov r2, #OMAP36XX_RTA_DISABLE |
284 | str r2, [r1] | 250 | str r2, [r1] |
285 | /* Fall thru for the remaining logic */ | 251 | /* Fall thru for the remaining logic */ |
286 | restore: | 252 | restore: |
287 | /* b restore*/ @ Enable to debug restore code | ||
288 | /* Check what was the reason for mpu reset and store the reason in r9*/ | 253 | /* Check what was the reason for mpu reset and store the reason in r9*/ |
289 | /* 1 - Only L1 and logic lost */ | 254 | /* 1 - Only L1 and logic lost */ |
290 | /* 2 - Only L2 lost - In this case, we wont be here */ | 255 | /* 2 - Only L2 lost - In this case, we wont be here */ |
291 | /* 3 - Both L1 and L2 lost */ | 256 | /* 3 - Both L1 and L2 lost */ |
292 | ldr r1, pm_pwstctrl_mpu | 257 | ldr r1, pm_pwstctrl_mpu |
293 | ldr r2, [r1] | 258 | ldr r2, [r1] |
294 | and r2, r2, #0x3 | 259 | and r2, r2, #0x3 |
295 | cmp r2, #0x0 @ Check if target power state was OFF or RET | 260 | cmp r2, #0x0 @ Check if target power state was OFF or RET |
296 | moveq r9, #0x3 @ MPU OFF => L1 and L2 lost | 261 | moveq r9, #0x3 @ MPU OFF => L1 and L2 lost |
297 | movne r9, #0x1 @ Only L1 and L2 lost => avoid L2 invalidation | 262 | movne r9, #0x1 @ Only L1 and L2 lost => avoid L2 invalidation |
298 | bne logic_l1_restore | 263 | bne logic_l1_restore |
299 | 264 | ||
300 | ldr r0, l2dis_3630 | 265 | ldr r0, l2dis_3630 |
301 | cmp r0, #0x1 @ should we disable L2 on 3630? | 266 | cmp r0, #0x1 @ should we disable L2 on 3630? |
302 | bne skipl2dis | 267 | bne skipl2dis |
303 | mrc p15, 0, r0, c1, c0, 1 | 268 | mrc p15, 0, r0, c1, c0, 1 |
304 | bic r0, r0, #2 @ disable L2 cache | 269 | bic r0, r0, #2 @ disable L2 cache |
305 | mcr p15, 0, r0, c1, c0, 1 | 270 | mcr p15, 0, r0, c1, c0, 1 |
306 | skipl2dis: | 271 | skipl2dis: |
307 | ldr r0, control_stat | 272 | ldr r0, control_stat |
308 | ldr r1, [r0] | 273 | ldr r1, [r0] |
309 | and r1, #0x700 | 274 | and r1, #0x700 |
310 | cmp r1, #0x300 | 275 | cmp r1, #0x300 |
311 | beq l2_inv_gp | 276 | beq l2_inv_gp |
/*
 * Restore path for secure (HS) devices: call ROM-code/PPA secure
 * services through the SMI monitor to invalidate L2 and restore the
 * (L2) auxiliary control register(s) saved in the scratchpad area.
 * NOTE(review): reconstructed from a diff rendering; instructions
 * themselves are unchanged.
 */
	mov	r0, #40			@ set service ID for PPA
	mov	r12, r0			@ copy secure Service ID in r12
	mov	r1, #0			@ set task id for ROM code in r1
	mov	r2, #4			@ set some flags in r2, r6
	mov	r6, #0xff
	adr	r3, l2_inv_api_params	@ r3 points to dummy parameters
	mcr	p15, 0, r0, c7, c10, 4	@ data write barrier
	mcr	p15, 0, r0, c7, c10, 5	@ data memory barrier
	.word	0xE1600071		@ call SMI monitor (smi #1)
	/* Write to Aux control register to set some bits */
	mov	r0, #42			@ set service ID for PPA
	mov	r12, r0			@ copy secure Service ID in r12
	mov	r1, #0			@ set task id for ROM code in r1
	mov	r2, #4			@ set some flags in r2, r6
	mov	r6, #0xff
	ldr	r4, scratchpad_base
	ldr	r3, [r4, #0xBC]		@ r3 points to parameters saved in scratchpad
	mcr	p15, 0, r0, c7, c10, 4	@ data write barrier
	mcr	p15, 0, r0, c7, c10, 5	@ data memory barrier
	.word	0xE1600071		@ call SMI monitor (smi #1)

#ifdef CONFIG_OMAP3_L2_AUX_SECURE_SAVE_RESTORE
	/* Restore L2 aux control register */
	@ set service ID for PPA
	mov	r0, #CONFIG_OMAP3_L2_AUX_SECURE_SERVICE_SET_ID
	mov	r12, r0			@ copy service ID in r12
	mov	r1, #0			@ set task ID for ROM code in r1
	mov	r2, #4			@ set some flags in r2, r6
	mov	r6, #0xff
	ldr	r4, scratchpad_base
	ldr	r3, [r4, #0xBC]
	adds	r3, r3, #8		@ r3 points to parameters (+8 past aux ctrl pair)
	mcr	p15, 0, r0, c7, c10, 4	@ data write barrier
	mcr	p15, 0, r0, c7, c10, 5	@ data memory barrier
	.word	0xE1600071		@ call SMI monitor (smi #1)
#endif
	b	logic_l1_restore
/* Dummy parameter block for the L2-invalidate PPA service above */
l2_inv_api_params:
	.word	0x1, 0x00
/*
 * Restore path for general-purpose (GP) devices: issue SMIs directly
 * (no PPA) to invalidate L2 and restore the auxiliary control
 * registers from the parameter block saved in the scratchpad.
 */
l2_inv_gp:
	/* Execute smi to invalidate L2 cache */
	mov	r12, #0x1		@ set up to invalidate L2
smi:	.word	0xE1600070		@ Call SMI monitor (smieq)
	/* Write to Aux control register to set some bits */
	ldr	r4, scratchpad_base
	ldr	r3, [r4, #0xBC]
	ldr	r0, [r3, #4]		@ saved AUX ctrl value (2nd word of block)
	mov	r12, #0x3		@ service: write AUX control register
	.word	0xE1600070		@ Call SMI monitor (smieq)
	ldr	r4, scratchpad_base
	ldr	r3, [r4, #0xBC]
	ldr	r0, [r3, #12]		@ saved L2 AUX ctrl value (4th word of block)
	mov	r12, #0x2		@ service: write L2 AUX control register
	.word	0xE1600070		@ Call SMI monitor (smieq)
/*
 * Restore CPU logic + L1 state after an off-mode wakeup:
 *  - re-enable L2 on 3630 if it was disabled before sleep,
 *  - restore banked regs (sp/spsr/lr) and all CP15 registers from the
 *    context area pointed to by scratchpad[0xBC],
 *  - restore cpsr and re-enable the MMU via a temporary 1:1 section
 *    mapping of the currently-executing code (fixed up again later),
 *  - finally pop the saved r0-r12 and return.
 */
logic_l1_restore:
	ldr	r1, l2dis_3630
	cmp	r1, #0x1		@ Do we need to re-enable L2 on 3630?
	bne	skipl2reen
	mrc	p15, 0, r1, c1, c0, 1
	orr	r1, r1, #2		@ re-enable L2 cache (AUX ctrl L2EN bit)
	mcr	p15, 0, r1, c1, c0, 1
skipl2reen:
	mov	r1, #0
	/* Invalidate all instruction caches to PoU
	 * and flush branch target cache */
	mcr	p15, 0, r1, c7, c5, 0

	ldr	r4, scratchpad_base
	ldr	r3, [r4, #0xBC]		@ r3 = saved context area in SDRAM
	adds	r3, r3, #16		@ skip the 4 restore-call parameter words
	ldmia	r3!, {r4-r6}
	mov	sp, r4			@ restore stack pointer
	msr	spsr_cxsf, r5		@ restore saved program status
	mov	lr, r6			@ restore link register

	ldmia	r3!, {r4-r9}
	/* Coprocessor access Control Register */
	mcr	p15, 0, r4, c1, c0, 2

	/* TTBR0 */
	MCR	p15, 0, r5, c2, c0, 0
	/* TTBR1 */
	MCR	p15, 0, r6, c2, c0, 1
	/* Translation table base control register */
	MCR	p15, 0, r7, c2, c0, 2
	/* Domain access Control Register */
	MCR	p15, 0, r8, c3, c0, 0
	/* Data fault status Register */
	MCR	p15, 0, r9, c5, c0, 0

	ldmia	r3!, {r4-r8}
	/* Instruction fault status Register */
	MCR	p15, 0, r4, c5, c0, 1
	/* Data Auxiliary Fault Status Register */
	MCR	p15, 0, r5, c5, c1, 0
	/* Instruction Auxiliary Fault Status Register */
	MCR	p15, 0, r6, c5, c1, 1
	/* Data Fault Address Register */
	MCR	p15, 0, r7, c6, c0, 0
	/* Instruction Fault Address Register */
	MCR	p15, 0, r8, c6, c0, 2
	ldmia	r3!, {r4-r7}

	/* User r/w thread and process ID */
	MCR	p15, 0, r4, c13, c0, 2
	/* User r/o thread and process ID */
	MCR	p15, 0, r5, c13, c0, 3
	/* Privileged only thread and process ID */
	MCR	p15, 0, r6, c13, c0, 4
	/* Cache size selection */
	MCR	p15, 2, r7, c0, c0, 0
	ldmia	r3!, {r4-r8}
	/* Data TLB lockdown registers */
	MCR	p15, 0, r4, c10, c0, 0
	/* Instruction TLB lockdown registers */
	MCR	p15, 0, r5, c10, c0, 1
	/* Secure or Nonsecure Vector Base Address */
	MCR	p15, 0, r6, c12, c0, 0
	/* FCSE PID */
	MCR	p15, 0, r7, c13, c0, 0
	/* Context PID */
	MCR	p15, 0, r8, c13, c0, 1

	ldmia	r3!, {r4-r5}
	/* Primary memory remap register */
	MCR	p15, 0, r4, c10, c2, 0
	/* Normal memory remap register */
	MCR	p15, 0, r5, c10, c2, 1

	/* Restore cpsr */
	ldmia	r3!, {r4}		@ load CPSR from SDRAM
	msr	cpsr, r4		@ store cpsr

	/* Enabling MMU here */
	mrc	p15, 0, r7, c2, c0, 2	@ Read TTBRControl
	/* Extract N (0:2) bits and decide whether to use TTBR0 or TTBR1 */
	and	r7, #0x7
	cmp	r7, #0x0
	beq	usettbr0
ttbr_error:
	/* More work needs to be done to support N[0:2] value other than 0
	 * So looping here so that the error can be detected
	 */
	b	ttbr_error
usettbr0:
	mrc	p15, 0, r2, c2, c0, 0
	ldr	r5, ttbrbit_mask
	and	r2, r5			@ r2 = translation table base address
	mov	r4, pc
	ldr	r5, table_index_mask
	and	r4, r5			@ r4 = 31 to 20 bits of pc
	/* Extract the value to be written to table entry */
	ldr	r1, table_entry		@ 1MB section, cacheable/privileged template
	add	r1, r1, r4		@ r1 has value to be written to table entry
	/* Getting the address of table entry to modify */
	lsr	r4, #18			@ section index * 4 = table offset
	add	r2, r4			@ r2 has the location which needs to be modified
	/* Storing previous entry of location being modified */
	ldr	r5, scratchpad_base
	ldr	r4, [r2]
	str	r4, [r5, #0xC0]		@ stash old entry for later restore
	/* Modify the table entry: temporary 1:1 map of this code */
	str	r1, [r2]
	/* Storing address of entry being modified
	 * - will be restored after enabling MMU */
	ldr	r5, scratchpad_base
	str	r2, [r5, #0xC4]

	mov	r0, #0
	mcr	p15, 0, r0, c7, c5, 4	@ Flush prefetch buffer
	mcr	p15, 0, r0, c7, c5, 6	@ Invalidate branch predictor array
	mcr	p15, 0, r0, c8, c5, 0	@ Invalidate instruction TLB
	mcr	p15, 0, r0, c8, c6, 0	@ Invalidate data TLB
	/* Restore control register but dont enable caches here */
	/* Caches will be enabled after restoring MMU table entry */
	ldmia	r3!, {r4}
	/* Store previous value of control register in scratchpad */
	str	r4, [r5, #0xC8]
	ldr	r2, cache_pred_disable_mask
	and	r4, r2			@ mask out cache/branch-predict enable bits
	mcr	p15, 0, r4, c1, c0, 0	@ turn MMU on, caches still off

	ldmfd	sp!, {r0-r12, pc}	@ restore regs and return
/*
 * Save CPU context to SDRAM before entering WFI.
 * On entry: r0 = SDRAM save area, r1 = target sleep state:
 *   1 - Only L1 and logic lost
 *   2 - Only L2 lost
 *   3 - Both L1 and L2 lost
 * Saves the restore-call parameters (AUX/L2-AUX ctrl), banked regs
 * and all CP15 state consumed by logic_l1_restore, in the same order.
 */
save_context_wfi:
	mov	r8, r0			@ Store SDRAM address in r8
	mrc	p15, 0, r5, c1, c0, 1	@ Read Auxiliary Control Register
	mov	r4, #0x1		@ Number of parameters for restore call
	stmia	r8!, {r4-r5}		@ Push parameters for restore call
	mrc	p15, 1, r5, c9, c0, 2	@ Read L2 AUX ctrl register
	stmia	r8!, {r4-r5}		@ Push parameters for restore call
	/* Check what the target sleep state is: stored in r1 */
	/* 1 - Only L1 and logic lost */
	/* 2 - Only L2 lost */
	/* 3 - Both L1 and L2 lost */
	cmp	r1, #0x2		@ Only L2 lost
	beq	clean_l2		@ no L1/logic context to save
	cmp	r1, #0x1		@ L2 retained
	/* r9 stores whether to clean L2 or not */
	moveq	r9, #0x0		@ Dont Clean L2
	movne	r9, #0x1		@ Clean L2
l1_logic_lost:
	/* Store sp and spsr to SDRAM */
	mov	r4, sp
	mrs	r5, spsr
	mov	r6, lr
	stmia	r8!, {r4-r6}
	/* Save all ARM CP15 registers */
	/* Coprocessor access control register */
	mrc	p15, 0, r6, c1, c0, 2
	stmia	r8!, {r6}
	/* TTBR0, TTBR1 and Translation table base control */
	mrc	p15, 0, r4, c2, c0, 0
	mrc	p15, 0, r5, c2, c0, 1
	mrc	p15, 0, r6, c2, c0, 2
	stmia	r8!, {r4-r6}
	/* Domain access control register, data fault status register,
	 * and instruction fault status register */
	mrc	p15, 0, r4, c3, c0, 0
	mrc	p15, 0, r5, c5, c0, 0
	mrc	p15, 0, r6, c5, c0, 1
	stmia	r8!, {r4-r6}
	/* Data aux fault status register, instruction aux fault status,
	 * data fault address register and instruction fault address register */
	mrc	p15, 0, r4, c5, c1, 0
	mrc	p15, 0, r5, c5, c1, 1
	mrc	p15, 0, r6, c6, c0, 0
	mrc	p15, 0, r7, c6, c0, 2
	stmia	r8!, {r4-r7}
	/* User r/w thread and process ID, user r/o thread and process ID,
	 * priv only thread and process ID, cache size selection */
	mrc	p15, 0, r4, c13, c0, 2
	mrc	p15, 0, r5, c13, c0, 3
	mrc	p15, 0, r6, c13, c0, 4
	mrc	p15, 2, r7, c0, c0, 0
	stmia	r8!, {r4-r7}
	/* Data TLB lockdown, instruction TLB lockdown registers */
	mrc	p15, 0, r5, c10, c0, 0
	mrc	p15, 0, r6, c10, c0, 1
	stmia	r8!, {r5-r6}
	/* Secure or non secure vector base address, FCSE PID, Context PID */
	mrc	p15, 0, r4, c12, c0, 0
	mrc	p15, 0, r5, c13, c0, 0
	mrc	p15, 0, r6, c13, c0, 1
	stmia	r8!, {r4-r6}
	/* Primary remap, normal remap registers */
	mrc	p15, 0, r4, c10, c2, 0
	mrc	p15, 0, r5, c10, c2, 1
	stmia	r8!, {r4-r5}

	/* Store current cpsr */
	mrs	r2, cpsr
	stmia	r8!, {r2}

	mrc	p15, 0, r4, c1, c0, 0
	/* save control register */
	stmia	r8!, {r4}
/*
 * Optionally clean L2 (via the kernel's v7 dcache flush routine),
 * issue barriers, execute WFI, and on an aborted sleep wait for the
 * SDRC to become accessible again before returning to the caller.
 * Falls through from save_context_wfi above; r9 selects L2 cleaning.
 */
clean_caches:
	/* Clean Data or unified cache to POU */
	/* How to invalidate only L1 cache???? - #FIX_ME# */
	/* mcr p15, 0, r11, c7, c11, 1 */
	cmp	r9, #1			@ Check whether L2 inval is required or not
	bne	skip_l2_inval
clean_l2:
	/*
	 * Jump out to kernel flush routine
	 *  - reuse that code is better
	 *  - it executes in a cached space so is faster than refetch per-block
	 *  - should be faster and will change with kernel
	 *  - 'might' have to copy address, load and jump to it
	 *  - lr is used since we are running in SRAM currently.
	 */
	ldr	r1, kernel_flush
	mov	lr, pc			@ fake a call: return address = next insn
	bx	r1

skip_l2_inval:
	/* Data memory barrier and Data sync barrier */
	mov	r1, #0
	mcr	p15, 0, r1, c7, c10, 4
	mcr	p15, 0, r1, c7, c10, 5

	wfi				@ wait for interrupt
	/* nops: execution resumes here if the WFI is aborted */
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	bl	wait_sdrc_ok		@ make sure SDRAM is usable again
	/* restore regs and return */
	ldmfd	sp!, {r0-r12, pc}
608 | 572 | ||
/* Make sure SDRC accesses are ok: wait for DPLL3 lock and CORE idle
 * status, allow DLL power-down on hw idle, then ensure the SDRC DLL
 * is locked - kicking (disable/re-enable) it if the 20uS wait fails.
 * Bumps kick_counter / wait_dll_lock_counter for debug statistics. */
wait_sdrc_ok:

	/* DPLL3 must be locked before accessing the SDRC. Maybe the HW ensures this. */
	ldr	r4, cm_idlest_ckgen
wait_dpll3_lock:
	ldr	r5, [r4]
	tst	r5, #1			@ ST_CORE_CLK: DPLL3 locked?
	beq	wait_dpll3_lock

	ldr	r4, cm_idlest1_core
wait_sdrc_ready:
	ldr	r5, [r4]
	tst	r5, #0x2		@ SDRC idle status bit
	bne	wait_sdrc_ready
	/* allow DLL powerdown upon hw idle req */
	ldr	r4, sdrc_power
	ldr	r5, [r4]
	bic	r5, r5, #0x40
	str	r5, [r4]
is_dll_in_lock_mode:

	/* Is dll in lock mode? */
	ldr	r4, sdrc_dlla_ctrl
	ldr	r5, [r4]
	tst	r5, #0x4		@ locked-mode bit set -> nothing to do
	bxne	lr
	/* wait till dll locks */
wait_dll_lock_timed:
	ldr	r4, wait_dll_lock_counter
	add	r4, r4, #1		@ debug counter: lock-wait attempts
	str	r4, wait_dll_lock_counter
	ldr	r4, sdrc_dlla_status
	mov	r6, #8			@ Wait 20uS for lock
wait_dll_lock:
	subs	r6, r6, #0x1
	beq	kick_dll		@ timed out - kick the DLL
	ldr	r5, [r4]
	and	r5, r5, #0x4
	cmp	r5, #0x4		@ DLL lock status
	bne	wait_dll_lock
	bx	lr

	/* disable/reenable DLL if not locked */
kick_dll:
	ldr	r4, sdrc_dlla_ctrl
	ldr	r5, [r4]
	mov	r6, r5
	bic	r6, #(1<<3)		@ disable dll
	str	r6, [r4]
	dsb
	orr	r6, r6, #(1<<3)		@ enable dll
	str	r6, [r4]
	dsb
	ldr	r4, kick_counter
	add	r4, r4, #1		@ debug counter: DLL kicks
	str	r4, kick_counter
	b	wait_dll_lock_timed
667 | 631 | ||
/*
 * Literal pool: register addresses and constants referenced by the
 * code above via pc-relative ldr. This code is copied to (and run
 * from) SRAM, so everything it needs must live inside this blob.
 */
cm_idlest1_core:
	.word	CM_IDLEST1_CORE_V
cm_idlest_ckgen:
	.word	CM_IDLEST_CKGEN_V
sdrc_dlla_status:
	.word	SDRC_DLLA_STATUS_V
sdrc_dlla_ctrl:
	.word	SDRC_DLLA_CTRL_V
pm_prepwstst_core_p:
	.word	PM_PREPWSTST_CORE_P
pm_pwstctrl_mpu:
	.word	PM_PWSTCTRL_MPU_P
scratchpad_base:
	.word	SCRATCHPAD_BASE_P
sram_base:
	.word	SRAM_BASE_P + 0x8000
sdrc_power:
	.word	SDRC_POWER_V
ttbrbit_mask:
	.word	0xFFFFC000		@ TTBR translation-table base bits
table_index_mask:
	.word	0xFFF00000		@ 1MB section index bits of an address
table_entry:
	.word	0x00000C02		@ template first-level 1MB section descriptor
cache_pred_disable_mask:
	.word	0xFFFFE7FB		@ clears I-cache/branch-predict/C bits in SCTLR
control_stat:
	.word	CONTROL_STAT
control_mem_rta:
	.word	CONTROL_MEM_RTA_CTRL
kernel_flush:
	.word	v7_flush_dcache_all
l2dis_3630:
	.word	0			@ set at runtime: L2 was disabled on 3630
/*
 * When exporting to userspace while the counters are in SRAM,
 * these 2 words need to be at the end to facilitate retrieval!
 */
kick_counter:
	.word	0
wait_dll_lock_counter:
	.word	0
/* Size of the suspend code blob, used when copying it into SRAM */
ENTRY(omap34xx_cpu_suspend_sz)
	.word	. - omap34xx_cpu_suspend
722 | 676 |