/*
 * This file and its contents are supplied under the terms of the
 * Common Development and Distribution License ("CDDL"), version 1.0.
 * You may only use this file in accordance with the terms of version
 * 1.0 of the CDDL.
 *
 * A full copy of the text of the CDDL should have accompanied this
 * source.  A copy of the CDDL is also available via the Internet at
 * http://www.illumos.org/license/CDDL.
 */

/*
 * Copyright 2013 Joyent, Inc.  All rights reserved.
 */

	.file	"cache.s"

/* XXXARM: rework cache/tlb maintenance functions to handle ARMv7 */

/*
 * Cache and memory barrier operations
 */

#include <sys/asm_linkage.h>

#if defined(lint) || defined(__lint)

void
membar_sync(void)
{}

void
membar_enter(void)
{}

void
membar_exit(void)
{}

void
membar_producer(void)
{}

void
membar_consumer(void)
{}

void
instr_sbarrier(void)
{}

void
data_sbarrier(void)
{}

#else	/* __lint */

/*
 * NOTE: membar_enter, membar_exit, membar_producer, and
 * membar_consumer are identical routines.  We define them
 * separately, instead of using ALTENTRY definitions to alias
 * them together, so that DTrace and debuggers will see a unique
 * address for them, allowing more accurate tracing.
 */
	ENTRY(membar_enter)
	ALTENTRY(membar_sync)
	dmb				@ full-system data memory barrier
	bx	lr
	SET_SIZE(membar_sync)
	SET_SIZE(membar_enter)

	ENTRY(membar_exit)
	dmb
	bx	lr
	SET_SIZE(membar_exit)

	ENTRY(membar_producer)
	dmb
	bx	lr
	SET_SIZE(membar_producer)

	ENTRY(membar_consumer)
	dmb
	bx	lr
	SET_SIZE(membar_consumer)

	/*
	 * Instruction synchronization barrier: flush the pipeline so
	 * instructions fetched after this point observe the effects of
	 * prior context-changing operations.
	 */
	ENTRY(instr_sbarrier)
	isb
	bx	lr
	SET_SIZE(instr_sbarrier)	@ fixed: was SET_SIZE(membar_consumer)

	/*
	 * Data synchronization barrier: stall until all outstanding
	 * memory accesses (including cache and TLB maintenance)
	 * complete.  This previously issued an isb, which is an
	 * instruction barrier, not a data barrier.
	 */
	ENTRY(data_sbarrier)
	dsb
	bx	lr
	SET_SIZE(data_sbarrier)

#endif	/* __lint */

#if defined(lint) || defined(__lint)

/*
 * The ARM architecture uses a modified Harvard Architecture which means that we
 * get the joys of fixing up this mess.  Primarily this means that when we
 * update data, it gets written to the data cache.  That needs to be flushed to
 * main memory and then the instruction cache needs to be invalidated.  This is
 * particularly important for things like krtld and DTrace.  While the data
 * cache does write itself out over time, we cannot rely on it having written
 * itself out to the state that we care about by the time that we'd like it to.
 * As such, we need to ensure that it's been flushed out ourselves.  This also
 * means that we could accidentally flush a region of the icache that's already
 * updated itself, but that's just what we have to do to keep Von Neumann's
 * spirit and great gift alive.
 *
 * The controllers for the caches have a few different options for invalidation.
 * One may:
 *
 *   o Invalidate or flush the entire cache
 *   o Invalidate or flush a cache line
 *   o Invalidate or flush a cache range
 *
 * We opt to take the third option here for the general case of making sure that
 * text has been synchronized.  While the data cache allows us to both
 * invalidate and flush the cache line, we don't currently have a need to do the
 * invalidation.
 *
 * Note that all of these operations should be aligned on a 32-byte boundary:
 * the range instructions only consume bits [31:5] of an address (a 32-byte
 * cache-line granule).  Callers are required to ensure that this is the case.
 */

void
armv7_icache_disable(void)
{}

void
armv7_icache_enable(void)
{}

void
armv7_dcache_disable(void)
{}

void
armv7_dcache_enable(void)
{}

void
armv7_icache_inval(void)
{}

void
armv7_dcache_inval(void)
{}

void
armv7_dcache_flush(void)
{}

void
armv7_text_flush_range(caddr_t start, size_t len)
{}

void
armv7_text_flush(void)
{}

#else	/* __lint */

	/*
	 * Enable the instruction cache: set the I bit (bit 12) in the
	 * SCTLR (CP15 c1).  Clobbers r0.
	 */
	ENTRY(armv7_icache_enable)
	mrc	p15, 0, r0, c1, c0, 0	@ read SCTLR
	orr	r0, #0x1000		@ set I bit
	mcr	p15, 0, r0, c1, c0, 0	@ write SCTLR
	bx	lr			@ fixed: fell through w/o a return
	SET_SIZE(armv7_icache_enable)

	/*
	 * Enable the data cache: set the C bit (bit 2) in the SCTLR.
	 */
	ENTRY(armv7_dcache_enable)
	mrc	p15, 0, r0, c1, c0, 0	@ read SCTLR
	orr	r0, #0x4		@ set C bit
	mcr	p15, 0, r0, c1, c0, 0	@ write SCTLR
	bx	lr			@ fixed: fell through w/o a return
	SET_SIZE(armv7_dcache_enable)

	/*
	 * Disable the instruction cache: clear the I bit in the SCTLR.
	 */
	ENTRY(armv7_icache_disable)
	mrc	p15, 0, r0, c1, c0, 0	@ read SCTLR
	bic	r0, #0x1000		@ clear I bit
	mcr	p15, 0, r0, c1, c0, 0	@ write SCTLR
	bx	lr			@ fixed: fell through w/o a return
	SET_SIZE(armv7_icache_disable)

	/*
	 * Disable the data cache: clear the C bit in the SCTLR.
	 */
	ENTRY(armv7_dcache_disable)
	mrc	p15, 0, r0, c1, c0, 0	@ read SCTLR
	bic	r0, #0x4		@ clear C bit
	mcr	p15, 0, r0, c1, c0, 0	@ write SCTLR
	bx	lr			@ fixed: fell through w/o a return
	SET_SIZE(armv7_dcache_disable)

	/*
	 * Invalidate the entire instruction cache.
	 */
	ENTRY(armv7_icache_inval)
	mov	r0, #0
	mcr	p15, 0, r0, c7, c5, 0	@ invalidate entire i-cache
	bx	lr
	SET_SIZE(armv7_icache_inval)

	/*
	 * Invalidate the entire data cache.
	 * NOTE(review): c7, c6, 0 is the ARMv6 whole-d-cache invalidate;
	 * on ARMv7 this must be done by set/way — covered by the XXXARM
	 * rework note at the top of this file.
	 */
	ENTRY(armv7_dcache_inval)
	mov	r0, #0
	mcr	p15, 0, r0, c7, c6, 0	@ invalidate entire d-cache
	dsb
	bx	lr
	SET_SIZE(armv7_dcache_inval)

	/*
	 * Flush the data cache to memory.
	 * NOTE(review): c7, c10, 4 is the legacy drain-write-buffer (DSB)
	 * encoding, not a full d-cache clean (c7, c10, 0) — confirm as
	 * part of the XXXARM rework.
	 */
	ENTRY(armv7_dcache_flush)
	mov	r0, #0
	mcr	p15, 0, r0, c7, c10, 4	@ drain write buffer
	dsb
	bx	lr
	SET_SIZE(armv7_dcache_flush)

	/*
	 * void armv7_text_flush_range(caddr_t start, size_t len)
	 *
	 * Synchronize the i-cache with the d-cache for [start, start + len):
	 * clean the d-cache range to memory and invalidate the i-cache
	 * range.  The mcrr range operations take an inclusive end address
	 * in Rt and the start address in Rt2, using bits [31:5] only, so
	 * callers must supply 32-byte-aligned regions.
	 *
	 * In:  r0 = start, r1 = len.  Clobbers r1.
	 */
	ENTRY(armv7_text_flush_range)
	add	r1, r1, r0		@ r1 = start + len
	sub	r1, r1, #1		@ fixed: inclusive end (was sub r1, r1, r0,
					@ which undid the add and left r1 = len)
	mcrr	p15, 0, r1, r0, c5	@ invalidate i-cache range [r0, r1]
	mcrr	p15, 0, r1, r0, c12	@ clean d-cache range [r0, r1]
	dsb
	isb
	bx	lr
	SET_SIZE(armv7_text_flush_range)

	/*
	 * Synchronize the entire i-cache with memory: drain the write
	 * buffer, then invalidate the whole i-cache.
	 */
	ENTRY(armv7_text_flush)
	mov	r0, #0
	mcr	p15, 0, r0, c7, c5, 0	@ invalidate entire i-cache
	mcr	p15, 0, r0, c7, c10, 4	@ drain write buffer
	dsb
	isb
	bx	lr
	SET_SIZE(armv7_text_flush)

#endif

#ifdef __lint

/*
 * Perform all of the operations necessary for tlb maintenance after an update
 * to the page tables.
 */
void
armv7_tlb_sync(void)
{}

#else	/* __lint */

	/*
	 * Make page-table updates visible: drain the write buffer so the
	 * table walker sees the new entries, invalidate the unified TLB,
	 * then invalidate the i-cache and branch target cache, and
	 * synchronize before returning.
	 */
	ENTRY(armv7_tlb_sync)
	mov	r0, #0
	mcr	p15, 0, r0, c7, c10, 4	@ drain write buffer
	dsb
	mcr	p15, 0, r0, c8, c7, 0	@ invalidate entire unified TLB
	mcr	p15, 0, r0, c8, c5, 0	@ invalidate i-cache + btc
	dsb
	isb
	bx	lr
	SET_SIZE(armv7_tlb_sync)

#endif	/* __lint */