Print this page
5043 remove deprecated atomic functions' prototypes
Split |
Close |
Expand all |
Collapse all |
--- old/usr/src/common/atomic/amd64/atomic.s
+++ new/usr/src/common/atomic/amd64/atomic.s
1 1 /*
2 2 * CDDL HEADER START
3 3 *
4 4 * The contents of this file are subject to the terms of the
5 5 * Common Development and Distribution License (the "License").
6 6 * You may not use this file except in compliance with the License.
7 7 *
8 8 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
9 9 * or http://www.opensolaris.org/os/licensing.
10 10 * See the License for the specific language governing permissions
11 11 * and limitations under the License.
12 12 *
13 13 * When distributing Covered Code, include this CDDL HEADER in each
14 14 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
15 15 * If applicable, add the following below this CDDL HEADER, with the
16 16 * fields enclosed by brackets "[]" replaced with your own identifying
17 17 * information: Portions Copyright [yyyy] [name of copyright owner]
18 18 *
19 19 * CDDL HEADER END
20 20 */
21 21
↓ open down ↓ |
21 lines elided |
↑ open up ↑ |
22 22 /*
23 23 * Copyright (c) 2004, 2010, Oracle and/or its affiliates. All rights reserved.
24 24 */
25 25
26 26 .file "atomic.s"
27 27
28 28 #include <sys/asm_linkage.h>
29 29
30 30 #if defined(_KERNEL)
31 31 /*
32 - * Legacy kernel interfaces; they will go away (eventually).
32 + * Legacy kernel interfaces; they will go away the moment our closed
33 + * bins no longer require them.
33 34 */
/ ANSI_PRAGMA_WEAK2(alias, target, function) — presumably emits `alias'
/ as a weak synonym for `target' (defined in sys/asm_linkage.h — TODO
/ confirm), so existing kernel callers of the old cas*/casptr/
/ atomic_*_long names continue to link against the atomic_* entries.
34 35 ANSI_PRAGMA_WEAK2(cas8,atomic_cas_8,function)
35 36 ANSI_PRAGMA_WEAK2(cas32,atomic_cas_32,function)
36 37 ANSI_PRAGMA_WEAK2(cas64,atomic_cas_64,function)
37 38 ANSI_PRAGMA_WEAK2(caslong,atomic_cas_ulong,function)
38 39 ANSI_PRAGMA_WEAK2(casptr,atomic_cas_ptr,function)
39 40 ANSI_PRAGMA_WEAK2(atomic_and_long,atomic_and_ulong,function)
40 41 ANSI_PRAGMA_WEAK2(atomic_or_long,atomic_or_ulong,function)
41 42 #endif
42 43
/
/ atomic_inc_{8,16,32,64} (aliases: atomic_inc_uchar/ushort/uint/ulong):
/ atomically increment *(%rdi) by one using a lock-prefixed inc of the
/ matching operand size.  No value is returned; the _nv variants below
/ return the new value.  SysV AMD64 ABI: %rdi = pointer to the target.
/
43 44 ENTRY(atomic_inc_8)
44 45 ALTENTRY(atomic_inc_uchar)
45 46 lock
46 47 incb (%rdi)
47 48 ret
48 49 SET_SIZE(atomic_inc_uchar)
49 50 SET_SIZE(atomic_inc_8)
50 51
51 52 ENTRY(atomic_inc_16)
52 53 ALTENTRY(atomic_inc_ushort)
53 54 lock
54 55 incw (%rdi)
55 56 ret
56 57 SET_SIZE(atomic_inc_ushort)
57 58 SET_SIZE(atomic_inc_16)
58 59
59 60 ENTRY(atomic_inc_32)
60 61 ALTENTRY(atomic_inc_uint)
61 62 lock
62 63 incl (%rdi)
63 64 ret
64 65 SET_SIZE(atomic_inc_uint)
65 66 SET_SIZE(atomic_inc_32)
66 67
67 68 ENTRY(atomic_inc_64)
68 69 ALTENTRY(atomic_inc_ulong)
69 70 lock
70 71 incq (%rdi)
71 72 ret
72 73 SET_SIZE(atomic_inc_ulong)
73 74 SET_SIZE(atomic_inc_64)
74 75
/
/ atomic_inc_{8,16,32,64}_nv: atomically increment *(%rdi) and return
/ the new value in %rax.  lock xadd of 1 yields the old value in the
/ accumulator; a local increment then forms the new value to return.
/
75 76 ENTRY(atomic_inc_8_nv)
76 77 ALTENTRY(atomic_inc_uchar_nv)
77 78 xorl %eax, %eax / clear upper bits of %eax return register
78 79 incb %al / %al = 1
79 80 lock
80 81 xaddb %al, (%rdi) / %al = old value, (%rdi) = new value
81 82 incb %al / return new value
82 83 ret
83 84 SET_SIZE(atomic_inc_uchar_nv)
84 85 SET_SIZE(atomic_inc_8_nv)
85 86
86 87 ENTRY(atomic_inc_16_nv)
87 88 ALTENTRY(atomic_inc_ushort_nv)
88 89 xorl %eax, %eax / clear upper bits of %eax return register
89 90 incw %ax / %ax = 1
90 91 lock
91 92 xaddw %ax, (%rdi) / %ax = old value, (%rdi) = new value
92 93 incw %ax / return new value
93 94 ret
94 95 SET_SIZE(atomic_inc_ushort_nv)
95 96 SET_SIZE(atomic_inc_16_nv)
96 97
97 98 ENTRY(atomic_inc_32_nv)
98 99 ALTENTRY(atomic_inc_uint_nv)
99 100 xorl %eax, %eax / %eax = 0
100 101 incl %eax / %eax = 1
101 102 lock
102 103 xaddl %eax, (%rdi) / %eax = old value, (%rdi) = new value
103 104 incl %eax / return new value
104 105 ret
105 106 SET_SIZE(atomic_inc_uint_nv)
106 107 SET_SIZE(atomic_inc_32_nv)
107 108
108 109 ENTRY(atomic_inc_64_nv)
109 110 ALTENTRY(atomic_inc_ulong_nv)
110 111 xorq %rax, %rax / %rax = 0
111 112 incq %rax / %rax = 1
112 113 lock
113 114 xaddq %rax, (%rdi) / %rax = old value, (%rdi) = new value
114 115 incq %rax / return new value
115 116 ret
116 117 SET_SIZE(atomic_inc_ulong_nv)
117 118 SET_SIZE(atomic_inc_64_nv)
118 119
/
/ atomic_dec_{8,16,32,64} (aliases: atomic_dec_uchar/ushort/uint/ulong):
/ atomically decrement *(%rdi) by one; no value is returned.
/
119 120 ENTRY(atomic_dec_8)
120 121 ALTENTRY(atomic_dec_uchar)
121 122 lock
122 123 decb (%rdi)
123 124 ret
124 125 SET_SIZE(atomic_dec_uchar)
125 126 SET_SIZE(atomic_dec_8)
126 127
127 128 ENTRY(atomic_dec_16)
128 129 ALTENTRY(atomic_dec_ushort)
129 130 lock
130 131 decw (%rdi)
131 132 ret
132 133 SET_SIZE(atomic_dec_ushort)
133 134 SET_SIZE(atomic_dec_16)
134 135
135 136 ENTRY(atomic_dec_32)
136 137 ALTENTRY(atomic_dec_uint)
137 138 lock
138 139 decl (%rdi)
139 140 ret
140 141 SET_SIZE(atomic_dec_uint)
141 142 SET_SIZE(atomic_dec_32)
142 143
143 144 ENTRY(atomic_dec_64)
144 145 ALTENTRY(atomic_dec_ulong)
145 146 lock
146 147 decq (%rdi)
147 148 ret
148 149 SET_SIZE(atomic_dec_ulong)
149 150 SET_SIZE(atomic_dec_64)
150 151
/
/ atomic_dec_{8,16,32,64}_nv: atomically decrement *(%rdi) and return
/ the new value in %rax.  lock xadd of -1 yields the old value in the
/ accumulator; a local decrement then forms the new value to return.
/
151 152 ENTRY(atomic_dec_8_nv)
152 153 ALTENTRY(atomic_dec_uchar_nv)
153 154 xorl %eax, %eax / clear upper bits of %eax return register
154 155 decb %al / %al = -1
155 156 lock
156 157 xaddb %al, (%rdi) / %al = old value, (%rdi) = new value
157 158 decb %al / return new value
158 159 ret
159 160 SET_SIZE(atomic_dec_uchar_nv)
160 161 SET_SIZE(atomic_dec_8_nv)
161 162
162 163 ENTRY(atomic_dec_16_nv)
163 164 ALTENTRY(atomic_dec_ushort_nv)
164 165 xorl %eax, %eax / clear upper bits of %eax return register
165 166 decw %ax / %ax = -1
166 167 lock
167 168 xaddw %ax, (%rdi) / %ax = old value, (%rdi) = new value
168 169 decw %ax / return new value
169 170 ret
170 171 SET_SIZE(atomic_dec_ushort_nv)
171 172 SET_SIZE(atomic_dec_16_nv)
172 173
173 174 ENTRY(atomic_dec_32_nv)
174 175 ALTENTRY(atomic_dec_uint_nv)
175 176 xorl %eax, %eax / %eax = 0
176 177 decl %eax / %eax = -1
177 178 lock
178 179 xaddl %eax, (%rdi) / %eax = old value, (%rdi) = new value
179 180 decl %eax / return new value
180 181 ret
181 182 SET_SIZE(atomic_dec_uint_nv)
182 183 SET_SIZE(atomic_dec_32_nv)
183 184
184 185 ENTRY(atomic_dec_64_nv)
185 186 ALTENTRY(atomic_dec_ulong_nv)
186 187 xorq %rax, %rax / %rax = 0
187 188 decq %rax / %rax = -1
188 189 lock
189 190 xaddq %rax, (%rdi) / %rax = old value, (%rdi) = new value
190 191 decq %rax / return new value
191 192 ret
192 193 SET_SIZE(atomic_dec_ulong_nv)
193 194 SET_SIZE(atomic_dec_64_nv)
194 195
/
/ atomic_add_{8,16,32,64}: atomically add the delta in the second
/ argument (%sil/%si/%esi/%rsi) to *(%rdi); no value is returned.
/ The 64-bit entry also serves atomic_add_ptr and atomic_add_long.
/
195 196 ENTRY(atomic_add_8)
196 197 ALTENTRY(atomic_add_char)
197 198 lock
198 199 addb %sil, (%rdi)
199 200 ret
200 201 SET_SIZE(atomic_add_char)
201 202 SET_SIZE(atomic_add_8)
202 203
203 204 ENTRY(atomic_add_16)
204 205 ALTENTRY(atomic_add_short)
205 206 lock
206 207 addw %si, (%rdi)
207 208 ret
208 209 SET_SIZE(atomic_add_short)
209 210 SET_SIZE(atomic_add_16)
210 211
211 212 ENTRY(atomic_add_32)
212 213 ALTENTRY(atomic_add_int)
213 214 lock
214 215 addl %esi, (%rdi)
215 216 ret
216 217 SET_SIZE(atomic_add_int)
217 218 SET_SIZE(atomic_add_32)
218 219
219 220 ENTRY(atomic_add_64)
220 221 ALTENTRY(atomic_add_ptr)
221 222 ALTENTRY(atomic_add_long)
222 223 lock
223 224 addq %rsi, (%rdi)
224 225 ret
225 226 SET_SIZE(atomic_add_long)
226 227 SET_SIZE(atomic_add_ptr)
227 228 SET_SIZE(atomic_add_64)
228 229
/
/ atomic_or_{8,16,32,64}: atomically OR the bits of the second
/ argument (%sil/%si/%esi/%rsi) into *(%rdi); no value is returned.
/
229 230 ENTRY(atomic_or_8)
230 231 ALTENTRY(atomic_or_uchar)
231 232 lock
232 233 orb %sil, (%rdi)
233 234 ret
234 235 SET_SIZE(atomic_or_uchar)
235 236 SET_SIZE(atomic_or_8)
236 237
237 238 ENTRY(atomic_or_16)
238 239 ALTENTRY(atomic_or_ushort)
239 240 lock
240 241 orw %si, (%rdi)
241 242 ret
242 243 SET_SIZE(atomic_or_ushort)
243 244 SET_SIZE(atomic_or_16)
244 245
245 246 ENTRY(atomic_or_32)
246 247 ALTENTRY(atomic_or_uint)
247 248 lock
248 249 orl %esi, (%rdi)
249 250 ret
250 251 SET_SIZE(atomic_or_uint)
251 252 SET_SIZE(atomic_or_32)
252 253
253 254 ENTRY(atomic_or_64)
254 255 ALTENTRY(atomic_or_ulong)
255 256 lock
256 257 orq %rsi, (%rdi)
257 258 ret
258 259 SET_SIZE(atomic_or_ulong)
259 260 SET_SIZE(atomic_or_64)
260 261
/
/ atomic_and_{8,16,32,64}: atomically AND the second argument
/ (%sil/%si/%esi/%rsi) into *(%rdi); no value is returned.
/
261 262 ENTRY(atomic_and_8)
262 263 ALTENTRY(atomic_and_uchar)
263 264 lock
264 265 andb %sil, (%rdi)
265 266 ret
266 267 SET_SIZE(atomic_and_uchar)
267 268 SET_SIZE(atomic_and_8)
268 269
269 270 ENTRY(atomic_and_16)
270 271 ALTENTRY(atomic_and_ushort)
271 272 lock
272 273 andw %si, (%rdi)
273 274 ret
274 275 SET_SIZE(atomic_and_ushort)
275 276 SET_SIZE(atomic_and_16)
276 277
277 278 ENTRY(atomic_and_32)
278 279 ALTENTRY(atomic_and_uint)
279 280 lock
280 281 andl %esi, (%rdi)
281 282 ret
282 283 SET_SIZE(atomic_and_uint)
283 284 SET_SIZE(atomic_and_32)
284 285
285 286 ENTRY(atomic_and_64)
286 287 ALTENTRY(atomic_and_ulong)
287 288 lock
288 289 andq %rsi, (%rdi)
289 290 ret
290 291 SET_SIZE(atomic_and_ulong)
291 292 SET_SIZE(atomic_and_64)
292 293
/
/ atomic_add_{8,16,32,64}_nv: atomically add the delta (second arg) to
/ *(%rdi) and return the resulting sum in %rax.  lock xadd leaves the
/ old value in the source register; adding the delta copy held in the
/ accumulator reconstructs the new value for the return.
/
293 294 ENTRY(atomic_add_8_nv)
294 295 ALTENTRY(atomic_add_char_nv)
295 296 movzbl %sil, %eax / %al = delta addend, clear upper bits
296 297 lock
297 298 xaddb %sil, (%rdi) / %sil = old value, (%rdi) = sum
298 299 addb %sil, %al / new value = original value + delta
299 300 ret
300 301 SET_SIZE(atomic_add_char_nv)
301 302 SET_SIZE(atomic_add_8_nv)
302 303
303 304 ENTRY(atomic_add_16_nv)
304 305 ALTENTRY(atomic_add_short_nv)
305 306 movzwl %si, %eax / %ax = delta addend, clean upper bits
306 307 lock
307 308 xaddw %si, (%rdi) / %si = old value, (%rdi) = sum
308 309 addw %si, %ax / new value = original value + delta
309 310 ret
310 311 SET_SIZE(atomic_add_short_nv)
311 312 SET_SIZE(atomic_add_16_nv)
312 313
313 314 ENTRY(atomic_add_32_nv)
314 315 ALTENTRY(atomic_add_int_nv)
315 316 mov %esi, %eax / %eax = delta addend
316 317 lock
317 318 xaddl %esi, (%rdi) / %esi = old value, (%rdi) = sum
318 319 add %esi, %eax / new value = original value + delta
319 320 ret
320 321 SET_SIZE(atomic_add_int_nv)
321 322 SET_SIZE(atomic_add_32_nv)
322 323
323 324 ENTRY(atomic_add_64_nv)
324 325 ALTENTRY(atomic_add_ptr_nv)
325 326 ALTENTRY(atomic_add_long_nv)
326 327 mov %rsi, %rax / %rax = delta addend
327 328 lock
328 329 xaddq %rsi, (%rdi) / %rsi = old value, (%rdi) = sum
329 330 addq %rsi, %rax / new value = original value + delta
330 331 ret
331 332 SET_SIZE(atomic_add_long_nv)
332 333 SET_SIZE(atomic_add_ptr_nv)
333 334 SET_SIZE(atomic_add_64_nv)
334 335
/
/ atomic_and_{8,16,32,64}_nv: atomically AND the second argument into
/ *(%rdi) and return the new value.  Implemented as a cmpxchg loop:
/ compute a candidate new value from the last-observed old value, then
/ lock cmpxchg it in.  On failure cmpxchg reloads the accumulator with
/ the current memory contents, so the retry uses a fresh old value.
/
335 336 ENTRY(atomic_and_8_nv)
336 337 ALTENTRY(atomic_and_uchar_nv)
337 338 movb (%rdi), %al / %al = old value
338 339 1:
339 340 movb %sil, %cl
340 341 andb %al, %cl / %cl = new value
341 342 lock
342 343 cmpxchgb %cl, (%rdi) / try to stick it in
343 344 jne 1b
344 345 movzbl %cl, %eax / return new value
345 346 ret
346 347 SET_SIZE(atomic_and_uchar_nv)
347 348 SET_SIZE(atomic_and_8_nv)
348 349
349 350 ENTRY(atomic_and_16_nv)
350 351 ALTENTRY(atomic_and_ushort_nv)
351 352 movw (%rdi), %ax / %ax = old value
352 353 1:
353 354 movw %si, %cx
354 355 andw %ax, %cx / %cx = new value
355 356 lock
356 357 cmpxchgw %cx, (%rdi) / try to stick it in
357 358 jne 1b
358 359 movzwl %cx, %eax / return new value
359 360 ret
360 361 SET_SIZE(atomic_and_ushort_nv)
361 362 SET_SIZE(atomic_and_16_nv)
362 363
363 364 ENTRY(atomic_and_32_nv)
364 365 ALTENTRY(atomic_and_uint_nv)
365 366 movl (%rdi), %eax
366 367 1:
367 368 movl %esi, %ecx
368 369 andl %eax, %ecx
369 370 lock
370 371 cmpxchgl %ecx, (%rdi)
371 372 jne 1b
372 373 movl %ecx, %eax
373 374 ret
374 375 SET_SIZE(atomic_and_uint_nv)
375 376 SET_SIZE(atomic_and_32_nv)
376 377
377 378 ENTRY(atomic_and_64_nv)
378 379 ALTENTRY(atomic_and_ulong_nv)
379 380 movq (%rdi), %rax
380 381 1:
381 382 movq %rsi, %rcx
382 383 andq %rax, %rcx
383 384 lock
384 385 cmpxchgq %rcx, (%rdi)
385 386 jne 1b
386 387 movq %rcx, %rax
387 388 ret
388 389 SET_SIZE(atomic_and_ulong_nv)
389 390 SET_SIZE(atomic_and_64_nv)
390 391
/
/ atomic_or_{8,16,32,64}_nv: atomically OR the second argument into
/ *(%rdi) and return the new value.  Same cmpxchg-loop structure as
/ the atomic_and_*_nv routines: on a failed cmpxchg the accumulator
/ holds the current memory value, and the loop retries with it.
/
391 392 ENTRY(atomic_or_8_nv)
392 393 ALTENTRY(atomic_or_uchar_nv)
393 394 movb (%rdi), %al / %al = old value
394 395 1:
395 396 movb %sil, %cl
396 397 orb %al, %cl / %cl = new value
397 398 lock
398 399 cmpxchgb %cl, (%rdi) / try to stick it in
399 400 jne 1b
400 401 movzbl %cl, %eax / return new value
401 402 ret
402 403 SET_SIZE(atomic_or_uchar_nv)
403 404 SET_SIZE(atomic_or_8_nv)
404 405
405 406 ENTRY(atomic_or_16_nv)
406 407 ALTENTRY(atomic_or_ushort_nv)
407 408 movw (%rdi), %ax / %ax = old value
408 409 1:
409 410 movw %si, %cx
410 411 orw %ax, %cx / %cx = new value
411 412 lock
412 413 cmpxchgw %cx, (%rdi) / try to stick it in
413 414 jne 1b
414 415 movzwl %cx, %eax / return new value
415 416 ret
416 417 SET_SIZE(atomic_or_ushort_nv)
417 418 SET_SIZE(atomic_or_16_nv)
418 419
419 420 ENTRY(atomic_or_32_nv)
420 421 ALTENTRY(atomic_or_uint_nv)
421 422 movl (%rdi), %eax
422 423 1:
423 424 movl %esi, %ecx
424 425 orl %eax, %ecx
425 426 lock
426 427 cmpxchgl %ecx, (%rdi)
427 428 jne 1b
428 429 movl %ecx, %eax
429 430 ret
430 431 SET_SIZE(atomic_or_uint_nv)
431 432 SET_SIZE(atomic_or_32_nv)
432 433
433 434 ENTRY(atomic_or_64_nv)
434 435 ALTENTRY(atomic_or_ulong_nv)
435 436 movq (%rdi), %rax
436 437 1:
437 438 movq %rsi, %rcx
438 439 orq %rax, %rcx
439 440 lock
440 441 cmpxchgq %rcx, (%rdi)
441 442 jne 1b
442 443 movq %rcx, %rax
443 444 ret
444 445 SET_SIZE(atomic_or_ulong_nv)
445 446 SET_SIZE(atomic_or_64_nv)
446 447
/
/ atomic_cas_{8,16,32,64}: compare-and-swap.  If *(%rdi) equals the
/ compare value (second arg, moved into the accumulator), store the
/ new value (third arg, %dl/%dx/%edx/%rdx).  Returns the value that
/ was in memory before the operation: cmpxchg leaves it in the
/ accumulator whether or not the exchange occurred.  The 64-bit entry
/ also serves atomic_cas_ulong and atomic_cas_ptr.
/
447 448 ENTRY(atomic_cas_8)
448 449 ALTENTRY(atomic_cas_uchar)
449 450 movzbl %sil, %eax
450 451 lock
451 452 cmpxchgb %dl, (%rdi)
452 453 ret
453 454 SET_SIZE(atomic_cas_uchar)
454 455 SET_SIZE(atomic_cas_8)
455 456
456 457 ENTRY(atomic_cas_16)
457 458 ALTENTRY(atomic_cas_ushort)
458 459 movzwl %si, %eax
459 460 lock
460 461 cmpxchgw %dx, (%rdi)
461 462 ret
462 463 SET_SIZE(atomic_cas_ushort)
463 464 SET_SIZE(atomic_cas_16)
464 465
465 466 ENTRY(atomic_cas_32)
466 467 ALTENTRY(atomic_cas_uint)
467 468 movl %esi, %eax
468 469 lock
469 470 cmpxchgl %edx, (%rdi)
470 471 ret
471 472 SET_SIZE(atomic_cas_uint)
472 473 SET_SIZE(atomic_cas_32)
473 474
474 475 ENTRY(atomic_cas_64)
475 476 ALTENTRY(atomic_cas_ulong)
476 477 ALTENTRY(atomic_cas_ptr)
477 478 movq %rsi, %rax
478 479 lock
479 480 cmpxchgq %rdx, (%rdi)
480 481 ret
481 482 SET_SIZE(atomic_cas_ptr)
482 483 SET_SIZE(atomic_cas_ulong)
483 484 SET_SIZE(atomic_cas_64)
484 485
/
/ atomic_swap_{8,16,32,64}: atomically exchange the second argument
/ with *(%rdi); returns the previous memory value in %rax.  (xchg with
/ a memory operand is implicitly locked on x86; the explicit lock
/ prefix is retained anyway.)  The 64-bit entry also serves
/ atomic_swap_ulong and atomic_swap_ptr.
/
485 486 ENTRY(atomic_swap_8)
486 487 ALTENTRY(atomic_swap_uchar)
487 488 movzbl %sil, %eax
488 489 lock
489 490 xchgb %al, (%rdi)
490 491 ret
491 492 SET_SIZE(atomic_swap_uchar)
492 493 SET_SIZE(atomic_swap_8)
493 494
494 495 ENTRY(atomic_swap_16)
495 496 ALTENTRY(atomic_swap_ushort)
496 497 movzwl %si, %eax
497 498 lock
498 499 xchgw %ax, (%rdi)
499 500 ret
500 501 SET_SIZE(atomic_swap_ushort)
501 502 SET_SIZE(atomic_swap_16)
502 503
503 504 ENTRY(atomic_swap_32)
504 505 ALTENTRY(atomic_swap_uint)
505 506 movl %esi, %eax
506 507 lock
507 508 xchgl %eax, (%rdi)
508 509 ret
509 510 SET_SIZE(atomic_swap_uint)
510 511 SET_SIZE(atomic_swap_32)
511 512
512 513 ENTRY(atomic_swap_64)
513 514 ALTENTRY(atomic_swap_ulong)
514 515 ALTENTRY(atomic_swap_ptr)
515 516 movq %rsi, %rax
516 517 lock
517 518 xchgq %rax, (%rdi)
518 519 ret
519 520 SET_SIZE(atomic_swap_ptr)
520 521 SET_SIZE(atomic_swap_ulong)
521 522 SET_SIZE(atomic_swap_64)
522 523
/
/ atomic_set_long_excl: atomically set bit %rsi of the word at (%rdi).
/ Returns 0 if the bit was previously clear, -1 if it was already set:
/ lock bts puts the old bit in CF, and jnc skips the decl that turns
/ the zeroed %eax into -1.
/
523 524 ENTRY(atomic_set_long_excl)
524 525 xorl %eax, %eax
525 526 lock
526 527 btsq %rsi, (%rdi)
527 528 jnc 1f
528 529 decl %eax / return -1
529 530 1:
530 531 ret
531 532 SET_SIZE(atomic_set_long_excl)
532 533
/
/ atomic_clear_long_excl: atomically clear bit %rsi of the word at
/ (%rdi).  Returns 0 if the bit was previously set, -1 if it was
/ already clear: lock btr puts the old bit in CF, and jc skips the
/ decl that turns the zeroed %eax into -1.
/
533 534 ENTRY(atomic_clear_long_excl)
534 535 xorl %eax, %eax
535 536 lock
536 537 btrq %rsi, (%rdi)
537 538 jc 1f
538 539 decl %eax / return -1
539 540 1:
540 541 ret
541 542 SET_SIZE(atomic_clear_long_excl)
542 543
543 544 #if !defined(_KERNEL)
544 545
545 546 /*
546 547 * NOTE: membar_enter and membar_exit are identical routines.
547 548 * We define them separately, instead of using ALTENTRY
548 549 * definitions to alias them together, so that DTrace and
549 550 * debuggers will see a unique address for them, allowing
550 551 * more accurate tracing.
551 552 */
552 553
/ membar_enter: full memory barrier, implemented as mfence.
553 554 ENTRY(membar_enter)
554 555 mfence
555 556 ret
556 557 SET_SIZE(membar_enter)
557 558
/ membar_exit: also mfence — kept as a separate routine (not an
/ ALTENTRY alias) so it has a unique address for tracing; see the
/ NOTE at the top of this section.
558 559 ENTRY(membar_exit)
559 560 mfence
560 561 ret
561 562 SET_SIZE(membar_exit)
562 563
/ membar_producer: store barrier (sfence) — orders prior stores
/ before subsequent stores.
563 564 ENTRY(membar_producer)
564 565 sfence
565 566 ret
566 567 SET_SIZE(membar_producer)
567 568
/ membar_consumer: load barrier (lfence) — orders prior loads
/ before subsequent loads.
568 569 ENTRY(membar_consumer)
569 570 lfence
570 571 ret
571 572 SET_SIZE(membar_consumer)
572 573
573 574 #endif /* !_KERNEL */
↓ open down ↓ |
531 lines elided |
↑ open up ↑ |
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX