/*
 * This file and its contents are supplied under the terms of the
 * Common Development and Distribution License ("CDDL"), version 1.0.
 * You may only use this file in accordance with the terms of version
 * 1.0 of the CDDL.
 *
 * A full copy of the text of the CDDL should have accompanied this
 * source.  A copy of the CDDL is also available via the Internet at
 * http://www.illumos.org/license/CDDL.
 */

/*
 * Copyright 2013 Joyent, Inc.  All rights reserved.
 */

	.file	"cache.s"

/* XXXARM: rework cache/tlb maintenance functions to handle ARMv7 */

/*
 * Cache and memory barrier operations
 */

#include <sys/asm_linkage.h>

#if defined(lint) || defined(__lint)

void
membar_sync(void)
{}

void
membar_enter(void)
{}

void
membar_exit(void)
{}

void
membar_producer(void)
{}

void
membar_consumer(void)
{}

void
instr_sbarrier(void)
{}

void
data_sbarrier(void)
{}

#else	/* __lint */

/*
 * NOTE: membar_enter, membar_exit, membar_producer, and
 * membar_consumer are identical routines.  We define them
 * separately, instead of using ALTENTRY definitions to alias
 * them together, so that DTrace and debuggers will see a unique
 * address for them, allowing more accurate tracing.
 */
	ENTRY(membar_enter)
	ALTENTRY(membar_sync)
	dmb				@ full-system data memory barrier
	bx	lr
	SET_SIZE(membar_sync)
	SET_SIZE(membar_enter)

	ENTRY(membar_exit)
	dmb
	bx	lr
	SET_SIZE(membar_exit)

	ENTRY(membar_producer)
	dmb
	bx	lr
	SET_SIZE(membar_producer)

	ENTRY(membar_consumer)
	dmb
	bx	lr
	SET_SIZE(membar_consumer)

	ENTRY(instr_sbarrier)
	isb				@ instruction synchronization barrier
	bx	lr
	SET_SIZE(instr_sbarrier)	@ fixed: was SET_SIZE(membar_consumer)

	ENTRY(data_sbarrier)
	dsb				@ fixed: data barrier requires dsb, not isb
	bx	lr
	SET_SIZE(data_sbarrier)

#endif	/* __lint */

#if defined(lint) || defined(__lint)

/*
 * The ARM architecture uses a modified Harvard Architecture which means that we
 * get the joys of fixing up this mess. Primarily this means that when we update
 * data, it gets written to the data cache. That needs to be flushed to main
 * memory and then the instruction cache needs to be invalidated. This is
 * particularly important for things like krtld and DTrace. While the data cache
 * does write itself out over time, we cannot rely on it having written itself
 * out to the state that we care about by the time that we'd like it to. As
 * such, we need to ensure that it's been flushed out ourselves. This also means
 * that we could accidentally flush a region of the icache that's already
 * updated itself, but that's just what we have to do to keep Von Neumann's
 * spirit and great gift alive.
 *
 * The controllers for the caches have a few different options for invalidation.
 * One may:
 *
 *   o Invalidate or flush the entire cache
 *   o Invalidate or flush a cache line
 *   o Invalidate or flush a cache range
 *
 * We opt to take the third option here for the general case of making sure that
 * text has been synchronized. While the data cache allows us to both invalidate
 * and flush the cache line, we don't currently have a need to do the
 * invalidation.
 *
 * Note that all of these operations should be aligned on an 8-byte boundary.
 * The instructions actually only end up using bits [31:5] of an address.
 * Callers are required to ensure that this is the case.
 */

void
armv7_icache_disable(void)
{}

void
armv7_icache_enable(void)
{}

void
armv7_dcache_disable(void)
{}

void
armv7_dcache_enable(void)
{}

void
armv7_icache_inval(void)
{}

void
armv7_dcache_inval(void)
{}

void
armv7_dcache_flush(void)
{}

void
armv7_text_flush_range(caddr_t start, size_t len)
{}

void
armv7_text_flush(void)
{}

#else	/* __lint */

	/*
	 * Enable the i-cache by setting SCTLR.I (bit 12).  An isb is
	 * required for a system control register write to be guaranteed
	 * visible to subsequent instructions.
	 */
	ENTRY(armv7_icache_enable)
	mrc	p15, 0, r0, c1, c0, 0
	orr	r0, #0x1000
	mcr	p15, 0, r0, c1, c0, 0
	isb				@ fixed: synchronize SCTLR update
	bx	lr			@ fixed: was missing, fell through
	SET_SIZE(armv7_icache_enable)

	/* Enable the d-cache by setting SCTLR.C (bit 2). */
	ENTRY(armv7_dcache_enable)
	mrc	p15, 0, r0, c1, c0, 0
	orr	r0, #0x4
	mcr	p15, 0, r0, c1, c0, 0
	isb				@ fixed: synchronize SCTLR update
	bx	lr			@ fixed: was missing, fell through
	SET_SIZE(armv7_dcache_enable)

	/* Disable the i-cache by clearing SCTLR.I (bit 12). */
	ENTRY(armv7_icache_disable)
	mrc	p15, 0, r0, c1, c0, 0
	bic	r0, #0x1000
	mcr	p15, 0, r0, c1, c0, 0
	isb				@ fixed: synchronize SCTLR update
	bx	lr			@ fixed: was missing, fell through
	SET_SIZE(armv7_icache_disable)

	/* Disable the d-cache by clearing SCTLR.C (bit 2). */
	ENTRY(armv7_dcache_disable)
	mrc	p15, 0, r0, c1, c0, 0
	bic	r0, #0x4
	mcr	p15, 0, r0, c1, c0, 0
	isb				@ fixed: synchronize SCTLR update
	bx	lr			@ fixed: was missing, fell through
	SET_SIZE(armv7_dcache_disable)

	ENTRY(armv7_icache_inval)
	mov	r0, #0
	mcr	p15, 0, r0, c7, c5, 0	@ Invalidate i-cache
	bx	lr
	SET_SIZE(armv7_icache_inval)

	ENTRY(armv7_dcache_inval)
	mov	r0, #0
	mcr	p15, 0, r0, c7, c6, 0	@ Invalidate d-cache
	dsb
	bx	lr
	SET_SIZE(armv7_dcache_inval)

	ENTRY(armv7_dcache_flush)
	mov	r0, #0
	mcr	p15, 0, r0, c7, c10, 4	@ Flush d-cache
	dsb
	bx	lr
	SET_SIZE(armv7_dcache_flush)

	/*
	 * void armv7_text_flush_range(caddr_t start, size_t len)
	 *
	 * In:    r0 = start address, r1 = length in bytes
	 * The mcrr range operations take an inclusive end address in the
	 * first transferred register and the start address in the second.
	 */
	ENTRY(armv7_text_flush_range)
	add	r1, r1, r0		@ r1 = start + len
	sub	r1, r1, #1		@ fixed: inclusive end addr (was sub r1, r1, r0,
					@ which undid the add and left r1 = len)
	mcrr	p15, 0, r1, r0, c5	@ Invalidate i-cache range
	mcrr	p15, 0, r1, r0, c12	@ Flush d-cache range
	dsb
	isb
	bx	lr
	SET_SIZE(armv7_text_flush_range)

	ENTRY(armv7_text_flush)
	mov	r0, #0
	mcr	p15, 0, r0, c7, c5, 0	@ Invalidate i-cache
	mcr	p15, 0, r0, c7, c10, 4	@ Flush d-cache
	dsb
	isb
	bx	lr
	SET_SIZE(armv7_text_flush)

#endif

#ifdef __lint

/*
 * Perform all of the operations necessary for tlb maintenance after an update
 * to the page tables.
 */
void
armv7_tlb_sync(void)
{}

#else	/* __lint */

	ENTRY(armv7_tlb_sync)
	mov	r0, #0
	mcr	p15, 0, r0, c7, c10, 4	@ Flush d-cache
	dsb
	mcr	p15, 0, r0, c8, c7, 0	@ Invalidate tlb
	mcr	p15, 0, r0, c8, c5, 0	@ Invalidate I-cache + btc
	dsb
	isb
	bx	lr
	SET_SIZE(armv7_tlb_sync)

#endif	/* __lint */