1 /*
   2  * CDDL HEADER START
   3  *
   4  * The contents of this file are subject to the terms of the
   5  * Common Development and Distribution License (the "License").
   6  * You may not use this file except in compliance with the License.
   7  *
   8  * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
   9  * or http://www.opensolaris.org/os/licensing.
  10  * See the License for the specific language governing permissions
  11  * and limitations under the License.
  12  *
  13  * When distributing Covered Code, include this CDDL HEADER in each
  14  * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
  15  * If applicable, add the following below this CDDL HEADER, with the
  16  * fields enclosed by brackets "[]" replaced with your own identifying
  17  * information: Portions Copyright [yyyy] [name of copyright owner]
  18  *
  19  * CDDL HEADER END
  20  */
  21 
  22 /*
  23  * Copyright 2010 Sun Microsystems, Inc.  All rights reserved.
  24  * Use is subject to license terms.
  25  */
  26 
  27         .file   "atomic.s"
  28 
  29 #include <sys/asm_linkage.h>
  30 
#if defined(_KERNEL)
	/*
	 * Legacy kernel interfaces; they will go away the moment our closed
	 * bins no longer require them.
	 */
	/ Each ANSI_PRAGMA_WEAK2(alias, target, function) emits a weak symbol
	/ "alias" resolving to the strong symbol "target" defined in this file.
	ANSI_PRAGMA_WEAK2(cas8,atomic_cas_8,function)
	ANSI_PRAGMA_WEAK2(cas32,atomic_cas_32,function)
	ANSI_PRAGMA_WEAK2(cas64,atomic_cas_64,function)
	ANSI_PRAGMA_WEAK2(caslong,atomic_cas_ulong,function)
	ANSI_PRAGMA_WEAK2(casptr,atomic_cas_ptr,function)
	ANSI_PRAGMA_WEAK2(atomic_and_long,atomic_and_ulong,function)
	ANSI_PRAGMA_WEAK2(atomic_or_long,atomic_or_ulong,function)
#endif
  44 
	ENTRY(atomic_inc_8)
	ALTENTRY(atomic_inc_uchar)
	/ void atomic_inc_8(volatile uint8_t *target)
	/ Atomically increment the byte at *target; no value returned.
	movl	4(%esp), %eax	/ %eax = target address
	lock
	incb	(%eax)		/ atomic (*target)++
	ret
	SET_SIZE(atomic_inc_uchar)
	SET_SIZE(atomic_inc_8)
  53 
	ENTRY(atomic_inc_16)
	ALTENTRY(atomic_inc_ushort)
	/ void atomic_inc_16(volatile uint16_t *target)
	/ Atomically increment the 16-bit word at *target; no value returned.
	movl	4(%esp), %eax	/ %eax = target address
	lock
	incw	(%eax)		/ atomic (*target)++
	ret
	SET_SIZE(atomic_inc_ushort)
	SET_SIZE(atomic_inc_16)
  62 
	ENTRY(atomic_inc_32)
	ALTENTRY(atomic_inc_uint)
	ALTENTRY(atomic_inc_ulong)
	/ void atomic_inc_32(volatile uint32_t *target)
	/ Atomically increment the 32-bit word at *target; no value returned.
	/ ulong_t is 32 bits on i386, so the ulong variant aliases here.
	movl	4(%esp), %eax	/ %eax = target address
	lock
	incl	(%eax)		/ atomic (*target)++
	ret
	SET_SIZE(atomic_inc_ulong)
	SET_SIZE(atomic_inc_uint)
	SET_SIZE(atomic_inc_32)
  73 
	ENTRY(atomic_inc_8_nv)
	ALTENTRY(atomic_inc_uchar_nv)
	/ uint8_t atomic_inc_8_nv(volatile uint8_t *target)
	/ Atomically increment *target and return the new (incremented) value.
	movl	4(%esp), %edx	/ %edx = target address
	xorl	%eax, %eax	/ clear upper bits of %eax
	incb	%al		/ %al = 1
	lock
	  xaddb	%al, (%edx)	/ %al = old value, inc (%edx)
	incb	%al	/ return new value = old value + 1
	ret
	SET_SIZE(atomic_inc_uchar_nv)
	SET_SIZE(atomic_inc_8_nv)
  85 
	ENTRY(atomic_inc_16_nv)
	ALTENTRY(atomic_inc_ushort_nv)
	/ uint16_t atomic_inc_16_nv(volatile uint16_t *target)
	/ Atomically increment *target and return the new (incremented) value.
	movl	4(%esp), %edx	/ %edx = target address
	xorl	%eax, %eax	/ clear upper bits of %eax
	incw	%ax		/ %ax = 1
	lock
	  xaddw	%ax, (%edx)	/ %ax = old value, inc (%edx)
	incw	%ax		/ return new value = old value + 1
	ret
	SET_SIZE(atomic_inc_ushort_nv)
	SET_SIZE(atomic_inc_16_nv)
  97 
	ENTRY(atomic_inc_32_nv)
	ALTENTRY(atomic_inc_uint_nv)
	ALTENTRY(atomic_inc_ulong_nv)
	/ uint32_t atomic_inc_32_nv(volatile uint32_t *target)
	/ Atomically increment *target and return the new (incremented) value.
	movl	4(%esp), %edx	/ %edx = target address
	xorl	%eax, %eax	/ %eax = 0
	incl	%eax		/ %eax = 1
	lock
	  xaddl	%eax, (%edx)	/ %eax = old value, inc (%edx)
	incl	%eax		/ return new value = old value + 1
	ret
	SET_SIZE(atomic_inc_ulong_nv)
	SET_SIZE(atomic_inc_uint_nv)
	SET_SIZE(atomic_inc_32_nv)
 111 
 112         /*
 113          * NOTE: If atomic_inc_64 and atomic_inc_64_nv are ever
 114          * separated, you need to also edit the libc i386 platform
 115          * specific mapfile and remove the NODYNSORT attribute
 116          * from atomic_inc_64_nv.
 117          */
 118         ENTRY(atomic_inc_64)
 119         ALTENTRY(atomic_inc_64_nv)
 120         pushl   %edi
 121         pushl   %ebx
 122         movl    12(%esp), %edi  / %edi = target address
 123         movl    (%edi), %eax
 124         movl    4(%edi), %edx   / %edx:%eax = old value
 125 1:
 126         xorl    %ebx, %ebx
 127         xorl    %ecx, %ecx
 128         incl    %ebx            / %ecx:%ebx = 1
 129         addl    %eax, %ebx
 130         adcl    %edx, %ecx      / add in the carry from inc
 131         lock
 132         cmpxchg8b (%edi)        / try to stick it in
 133         jne     1b
 134         movl    %ebx, %eax
 135         movl    %ecx, %edx      / return new value
 136         popl    %ebx
 137         popl    %edi
 138         ret
 139         SET_SIZE(atomic_inc_64_nv)
 140         SET_SIZE(atomic_inc_64)
 141 
	ENTRY(atomic_dec_8)
	ALTENTRY(atomic_dec_uchar)
	/ void atomic_dec_8(volatile uint8_t *target)
	/ Atomically decrement the byte at *target; no value returned.
	movl	4(%esp), %eax	/ %eax = target address
	lock
	decb	(%eax)		/ atomic (*target)--
	ret
	SET_SIZE(atomic_dec_uchar)
	SET_SIZE(atomic_dec_8)
 150 
	ENTRY(atomic_dec_16)
	ALTENTRY(atomic_dec_ushort)
	/ void atomic_dec_16(volatile uint16_t *target)
	/ Atomically decrement the 16-bit word at *target; no value returned.
	movl	4(%esp), %eax	/ %eax = target address
	lock
	decw	(%eax)		/ atomic (*target)--
	ret
	SET_SIZE(atomic_dec_ushort)
	SET_SIZE(atomic_dec_16)
 159 
	ENTRY(atomic_dec_32)
	ALTENTRY(atomic_dec_uint)
	ALTENTRY(atomic_dec_ulong)
	/ void atomic_dec_32(volatile uint32_t *target)
	/ Atomically decrement the 32-bit word at *target; no value returned.
	movl	4(%esp), %eax	/ %eax = target address
	lock
	decl	(%eax)		/ atomic (*target)--
	ret
	SET_SIZE(atomic_dec_ulong)
	SET_SIZE(atomic_dec_uint)
	SET_SIZE(atomic_dec_32)
 170 
	ENTRY(atomic_dec_8_nv)
	ALTENTRY(atomic_dec_uchar_nv)
	/ uint8_t atomic_dec_8_nv(volatile uint8_t *target)
	/ Atomically decrement *target and return the new (decremented) value.
	movl	4(%esp), %edx	/ %edx = target address
	xorl	%eax, %eax	/ zero upper bits of %eax
	decb	%al		/ %al = -1
	lock
	  xaddb	%al, (%edx)	/ %al = old value, dec (%edx)
	decb	%al		/ return new value = old value - 1
	ret
	SET_SIZE(atomic_dec_uchar_nv)
	SET_SIZE(atomic_dec_8_nv)
 182 
	ENTRY(atomic_dec_16_nv)
	ALTENTRY(atomic_dec_ushort_nv)
	/ uint16_t atomic_dec_16_nv(volatile uint16_t *target)
	/ Atomically decrement *target and return the new (decremented) value.
	movl	4(%esp), %edx	/ %edx = target address
	xorl	%eax, %eax	/ zero upper bits of %eax
	decw	%ax		/ %ax = -1
	lock
	  xaddw	%ax, (%edx)	/ %ax = old value, dec (%edx)
	decw	%ax		/ return new value = old value - 1
	ret
	SET_SIZE(atomic_dec_ushort_nv)
	SET_SIZE(atomic_dec_16_nv)
 194 
	ENTRY(atomic_dec_32_nv)
	ALTENTRY(atomic_dec_uint_nv)
	ALTENTRY(atomic_dec_ulong_nv)
	/ uint32_t atomic_dec_32_nv(volatile uint32_t *target)
	/ Atomically decrement *target and return the new (decremented) value.
	movl	4(%esp), %edx	/ %edx = target address
	xorl	%eax, %eax	/ %eax = 0
	decl	%eax		/ %eax = -1
	lock
	  xaddl	%eax, (%edx)	/ %eax = old value, dec (%edx)
	decl	%eax		/ return new value = old value - 1
	ret
	SET_SIZE(atomic_dec_ulong_nv)
	SET_SIZE(atomic_dec_uint_nv)
	SET_SIZE(atomic_dec_32_nv)
 208 
 209         /*
 210          * NOTE: If atomic_dec_64 and atomic_dec_64_nv are ever
 211          * separated, it is important to edit the libc i386 platform
 212          * specific mapfile and remove the NODYNSORT attribute
 213          * from atomic_dec_64_nv.
 214          */
 215         ENTRY(atomic_dec_64)
 216         ALTENTRY(atomic_dec_64_nv)
 217         pushl   %edi
 218         pushl   %ebx
 219         movl    12(%esp), %edi  / %edi = target address
 220         movl    (%edi), %eax
 221         movl    4(%edi), %edx   / %edx:%eax = old value
 222 1:
 223         xorl    %ebx, %ebx
 224         xorl    %ecx, %ecx
 225         not     %ecx
 226         not     %ebx            / %ecx:%ebx = -1
 227         addl    %eax, %ebx
 228         adcl    %edx, %ecx      / add in the carry from inc
 229         lock
 230         cmpxchg8b (%edi)        / try to stick it in
 231         jne     1b
 232         movl    %ebx, %eax
 233         movl    %ecx, %edx      / return new value
 234         popl    %ebx
 235         popl    %edi
 236         ret
 237         SET_SIZE(atomic_dec_64_nv)
 238         SET_SIZE(atomic_dec_64)
 239 
	ENTRY(atomic_add_8)
	ALTENTRY(atomic_add_char)
	/ void atomic_add_8(volatile uint8_t *target, int8_t delta)
	/ Atomically add delta to the byte at *target; no value returned.
	movl	4(%esp), %eax	/ %eax = target address
	movl	8(%esp), %ecx	/ %cl = delta
	lock
	addb	%cl, (%eax)	/ atomic *target += delta
	ret
	SET_SIZE(atomic_add_char)
	SET_SIZE(atomic_add_8)
 249 
	ENTRY(atomic_add_16)
	ALTENTRY(atomic_add_short)
	/ void atomic_add_16(volatile uint16_t *target, int16_t delta)
	/ Atomically add delta to the 16-bit word at *target; no value returned.
	movl	4(%esp), %eax	/ %eax = target address
	movl	8(%esp), %ecx	/ %cx = delta
	lock
	addw	%cx, (%eax)	/ atomic *target += delta
	ret
	SET_SIZE(atomic_add_short)
	SET_SIZE(atomic_add_16)
 259 
	ENTRY(atomic_add_32)
	ALTENTRY(atomic_add_int)
	ALTENTRY(atomic_add_ptr)
	ALTENTRY(atomic_add_long)
	/ void atomic_add_32(volatile uint32_t *target, int32_t delta)
	/ Atomically add delta to the 32-bit word at *target; no value returned.
	/ int, long, and pointers are all 32 bits on i386, hence the aliases.
	movl	4(%esp), %eax	/ %eax = target address
	movl	8(%esp), %ecx	/ %ecx = delta
	lock
	addl	%ecx, (%eax)	/ atomic *target += delta
	ret
	SET_SIZE(atomic_add_long)
	SET_SIZE(atomic_add_ptr)
	SET_SIZE(atomic_add_int)
	SET_SIZE(atomic_add_32)
 273 
	ENTRY(atomic_or_8)
	ALTENTRY(atomic_or_uchar)
	/ void atomic_or_8(volatile uint8_t *target, uint8_t bits)
	/ Atomically OR bits into the byte at *target; no value returned.
	movl	4(%esp), %eax	/ %eax = target address
	movb	8(%esp), %cl	/ %cl = bits to set
	lock
	orb	%cl, (%eax)	/ atomic *target |= bits
	ret
	SET_SIZE(atomic_or_uchar)
	SET_SIZE(atomic_or_8)
 283 
	ENTRY(atomic_or_16)
	ALTENTRY(atomic_or_ushort)
	/ void atomic_or_16(volatile uint16_t *target, uint16_t bits)
	/ Atomically OR bits into the 16-bit word at *target; no value returned.
	movl	4(%esp), %eax	/ %eax = target address
	movw	8(%esp), %cx	/ %cx = bits to set
	lock
	orw	%cx, (%eax)	/ atomic *target |= bits
	ret
	SET_SIZE(atomic_or_ushort)
	SET_SIZE(atomic_or_16)
 293 
	ENTRY(atomic_or_32)
	ALTENTRY(atomic_or_uint)
	ALTENTRY(atomic_or_ulong)
	/ void atomic_or_32(volatile uint32_t *target, uint32_t bits)
	/ Atomically OR bits into the 32-bit word at *target; no value returned.
	movl	4(%esp), %eax	/ %eax = target address
	movl	8(%esp), %ecx	/ %ecx = bits to set
	lock
	orl	%ecx, (%eax)	/ atomic *target |= bits
	ret
	SET_SIZE(atomic_or_ulong)
	SET_SIZE(atomic_or_uint)
	SET_SIZE(atomic_or_32)
 305 
	ENTRY(atomic_and_8)
	ALTENTRY(atomic_and_uchar)
	/ void atomic_and_8(volatile uint8_t *target, uint8_t bits)
	/ Atomically AND the byte at *target with bits; no value returned.
	movl	4(%esp), %eax	/ %eax = target address
	movb	8(%esp), %cl	/ %cl = mask
	lock
	andb	%cl, (%eax)	/ atomic *target &= bits
	ret
	SET_SIZE(atomic_and_uchar)
	SET_SIZE(atomic_and_8)
 315 
	ENTRY(atomic_and_16)
	ALTENTRY(atomic_and_ushort)
	/ void atomic_and_16(volatile uint16_t *target, uint16_t bits)
	/ Atomically AND the 16-bit word at *target with bits; no value returned.
	movl	4(%esp), %eax	/ %eax = target address
	movw	8(%esp), %cx	/ %cx = mask
	lock
	andw	%cx, (%eax)	/ atomic *target &= bits
	ret
	SET_SIZE(atomic_and_ushort)
	SET_SIZE(atomic_and_16)
 325 
	ENTRY(atomic_and_32)
	ALTENTRY(atomic_and_uint)
	ALTENTRY(atomic_and_ulong)
	/ void atomic_and_32(volatile uint32_t *target, uint32_t bits)
	/ Atomically AND the 32-bit word at *target with bits; no value returned.
	movl	4(%esp), %eax	/ %eax = target address
	movl	8(%esp), %ecx	/ %ecx = mask
	lock
	andl	%ecx, (%eax)	/ atomic *target &= bits
	ret
	SET_SIZE(atomic_and_ulong)
	SET_SIZE(atomic_and_uint)
	SET_SIZE(atomic_and_32)
 337 
	ENTRY(atomic_add_8_nv)
	ALTENTRY(atomic_add_char_nv)
	/ uint8_t atomic_add_8_nv(volatile uint8_t *target, int8_t delta)
	/ Atomically add delta to *target and return the new value.
	movl	4(%esp), %edx	/ %edx = target address
	movb	8(%esp), %cl	/ %cl = delta
	movzbl	%cl, %eax	/ %al = delta, zero extended
	lock
	  xaddb	%cl, (%edx)	/ %cl = old value, (%edx) = sum
	addb	%cl, %al	/ return old value plus delta
	ret
	SET_SIZE(atomic_add_char_nv)
	SET_SIZE(atomic_add_8_nv)
 349 
	ENTRY(atomic_add_16_nv)
	ALTENTRY(atomic_add_short_nv)
	/ uint16_t atomic_add_16_nv(volatile uint16_t *target, int16_t delta)
	/ Atomically add delta to *target and return the new value.
	movl	4(%esp), %edx	/ %edx = target address
	movw	8(%esp), %cx	/ %cx = delta
	movzwl	%cx, %eax	/ %ax = delta, zero extended
	lock
	  xaddw	%cx, (%edx)	/ %cx = old value, (%edx) = sum
	addw	%cx, %ax	/ return old value plus delta
	ret
	SET_SIZE(atomic_add_short_nv)
	SET_SIZE(atomic_add_16_nv)
 361 
	ENTRY(atomic_add_32_nv)
	ALTENTRY(atomic_add_int_nv)
	ALTENTRY(atomic_add_ptr_nv)
	ALTENTRY(atomic_add_long_nv)
	/ uint32_t atomic_add_32_nv(volatile uint32_t *target, int32_t delta)
	/ Atomically add delta to *target and return the new value.
	movl	4(%esp), %edx	/ %edx = target address
	movl	8(%esp), %eax	/ %eax = delta
	movl	%eax, %ecx	/ %ecx = delta (xadd will overwrite %eax)
	lock
	  xaddl	%eax, (%edx)	/ %eax = old value, (%edx) = sum
	addl	%ecx, %eax	/ return old value plus delta
	ret
	SET_SIZE(atomic_add_long_nv)
	SET_SIZE(atomic_add_ptr_nv)
	SET_SIZE(atomic_add_int_nv)
	SET_SIZE(atomic_add_32_nv)
 377 
 378         /*
 379          * NOTE: If atomic_add_64 and atomic_add_64_nv are ever
 380          * separated, it is important to edit the libc i386 platform
 381          * specific mapfile and remove the NODYNSORT attribute
 382          * from atomic_add_64_nv.
 383          */
 384         ENTRY(atomic_add_64)
 385         ALTENTRY(atomic_add_64_nv)
 386         pushl   %edi
 387         pushl   %ebx
 388         movl    12(%esp), %edi  / %edi = target address
 389         movl    (%edi), %eax
 390         movl    4(%edi), %edx   / %edx:%eax = old value
 391 1:
 392         movl    16(%esp), %ebx
 393         movl    20(%esp), %ecx  / %ecx:%ebx = delta
 394         addl    %eax, %ebx
 395         adcl    %edx, %ecx      / %ecx:%ebx = new value
 396         lock
 397         cmpxchg8b (%edi)        / try to stick it in
 398         jne     1b
 399         movl    %ebx, %eax
 400         movl    %ecx, %edx      / return new value
 401         popl    %ebx
 402         popl    %edi
 403         ret
 404         SET_SIZE(atomic_add_64_nv)
 405         SET_SIZE(atomic_add_64)
 406 
	ENTRY(atomic_or_8_nv)
	ALTENTRY(atomic_or_uchar_nv)
	/ uint8_t atomic_or_8_nv(volatile uint8_t *target, uint8_t bits)
	/ Atomically OR bits into *target and return the new value.
	movl	4(%esp), %edx	/ %edx = target address
	movb	(%edx), %al	/ %al = old value
1:
	movl	8(%esp), %ecx	/ %cl = bits to set (reload: %cl clobbered below)
	orb	%al, %cl	/ %cl = new value
	lock
	cmpxchgb %cl, (%edx)	/ try to stick it in
	jne	1b		/ failed: %al = current value, retry
	movzbl	%cl, %eax	/ return new value
	ret
	SET_SIZE(atomic_or_uchar_nv)
	SET_SIZE(atomic_or_8_nv)
 421 
	ENTRY(atomic_or_16_nv)
	ALTENTRY(atomic_or_ushort_nv)
	/ uint16_t atomic_or_16_nv(volatile uint16_t *target, uint16_t bits)
	/ Atomically OR bits into *target and return the new value.
	movl	4(%esp), %edx	/ %edx = target address
	movw	(%edx), %ax	/ %ax = old value
1:
	movl	8(%esp), %ecx	/ %cx = bits to set (reload: %cx clobbered below)
	orw	%ax, %cx	/ %cx = new value
	lock
	cmpxchgw %cx, (%edx)	/ try to stick it in
	jne	1b		/ failed: %ax = current value, retry
	movzwl	%cx, %eax	/ return new value
	ret
	SET_SIZE(atomic_or_ushort_nv)
	SET_SIZE(atomic_or_16_nv)
 436 
	ENTRY(atomic_or_32_nv)
	ALTENTRY(atomic_or_uint_nv)
	ALTENTRY(atomic_or_ulong_nv)
	/ uint32_t atomic_or_32_nv(volatile uint32_t *target, uint32_t bits)
	/ Atomically OR bits into *target and return the new value.
	movl	4(%esp), %edx	/ %edx = target address
	movl	(%edx), %eax	/ %eax = old value
1:
	movl	8(%esp), %ecx	/ %ecx = bits to set (reload: clobbered below)
	orl	%eax, %ecx	/ %ecx = new value
	lock
	cmpxchgl %ecx, (%edx)	/ try to stick it in
	jne	1b		/ failed: %eax = current value, retry
	movl	%ecx, %eax	/ return new value
	ret
	SET_SIZE(atomic_or_ulong_nv)
	SET_SIZE(atomic_or_uint_nv)
	SET_SIZE(atomic_or_32_nv)
 453 
 454         /*
 455          * NOTE: If atomic_or_64 and atomic_or_64_nv are ever
 456          * separated, it is important to edit the libc i386 platform
 457          * specific mapfile and remove the NODYNSORT attribute
 458          * from atomic_or_64_nv.
 459          */
 460         ENTRY(atomic_or_64)
 461         ALTENTRY(atomic_or_64_nv)
 462         pushl   %edi
 463         pushl   %ebx
 464         movl    12(%esp), %edi  / %edi = target address
 465         movl    (%edi), %eax
 466         movl    4(%edi), %edx   / %edx:%eax = old value
 467 1:
 468         movl    16(%esp), %ebx
 469         movl    20(%esp), %ecx  / %ecx:%ebx = delta
 470         orl     %eax, %ebx
 471         orl     %edx, %ecx      / %ecx:%ebx = new value
 472         lock
 473         cmpxchg8b (%edi)        / try to stick it in
 474         jne     1b
 475         movl    %ebx, %eax
 476         movl    %ecx, %edx      / return new value
 477         popl    %ebx
 478         popl    %edi
 479         ret
 480         SET_SIZE(atomic_or_64_nv)
 481         SET_SIZE(atomic_or_64)
 482 
	ENTRY(atomic_and_8_nv)
	ALTENTRY(atomic_and_uchar_nv)
	/ uint8_t atomic_and_8_nv(volatile uint8_t *target, uint8_t bits)
	/ Atomically AND *target with bits and return the new value.
	movl	4(%esp), %edx	/ %edx = target address
	movb	(%edx), %al	/ %al = old value
1:
	movl	8(%esp), %ecx	/ %cl = mask (reload: %cl clobbered below)
	andb	%al, %cl	/ %cl = new value
	lock
	cmpxchgb %cl, (%edx)	/ try to stick it in
	jne	1b		/ failed: %al = current value, retry
	movzbl	%cl, %eax	/ return new value
	ret
	SET_SIZE(atomic_and_uchar_nv)
	SET_SIZE(atomic_and_8_nv)
 497 
	ENTRY(atomic_and_16_nv)
	ALTENTRY(atomic_and_ushort_nv)
	/ uint16_t atomic_and_16_nv(volatile uint16_t *target, uint16_t bits)
	/ Atomically AND *target with bits and return the new value.
	movl	4(%esp), %edx	/ %edx = target address
	movw	(%edx), %ax	/ %ax = old value
1:
	movl	8(%esp), %ecx	/ %cx = mask (reload: %cx clobbered below)
	andw	%ax, %cx	/ %cx = new value
	lock
	cmpxchgw %cx, (%edx)	/ try to stick it in
	jne	1b		/ failed: %ax = current value, retry
	movzwl	%cx, %eax	/ return new value
	ret
	SET_SIZE(atomic_and_ushort_nv)
	SET_SIZE(atomic_and_16_nv)
 512 
	ENTRY(atomic_and_32_nv)
	ALTENTRY(atomic_and_uint_nv)
	ALTENTRY(atomic_and_ulong_nv)
	/ uint32_t atomic_and_32_nv(volatile uint32_t *target, uint32_t bits)
	/ Atomically AND *target with bits and return the new value.
	movl	4(%esp), %edx	/ %edx = target address
	movl	(%edx), %eax	/ %eax = old value
1:
	movl	8(%esp), %ecx	/ %ecx = mask (reload: clobbered below)
	andl	%eax, %ecx	/ %ecx = new value
	lock
	cmpxchgl %ecx, (%edx)	/ try to stick it in
	jne	1b		/ failed: %eax = current value, retry
	movl	%ecx, %eax	/ return new value
	ret
	SET_SIZE(atomic_and_ulong_nv)
	SET_SIZE(atomic_and_uint_nv)
	SET_SIZE(atomic_and_32_nv)
 529 
 530         /*
 531          * NOTE: If atomic_and_64 and atomic_and_64_nv are ever
 532          * separated, it is important to edit the libc i386 platform
 533          * specific mapfile and remove the NODYNSORT attribute
 534          * from atomic_and_64_nv.
 535          */
 536         ENTRY(atomic_and_64)
 537         ALTENTRY(atomic_and_64_nv)
 538         pushl   %edi
 539         pushl   %ebx
 540         movl    12(%esp), %edi  / %edi = target address
 541         movl    (%edi), %eax
 542         movl    4(%edi), %edx   / %edx:%eax = old value
 543 1:
 544         movl    16(%esp), %ebx
 545         movl    20(%esp), %ecx  / %ecx:%ebx = delta
 546         andl    %eax, %ebx
 547         andl    %edx, %ecx      / %ecx:%ebx = new value
 548         lock
 549         cmpxchg8b (%edi)        / try to stick it in
 550         jne     1b
 551         movl    %ebx, %eax
 552         movl    %ecx, %edx      / return new value
 553         popl    %ebx
 554         popl    %edi
 555         ret
 556         SET_SIZE(atomic_and_64_nv)
 557         SET_SIZE(atomic_and_64)
 558 
	ENTRY(atomic_cas_8)
	ALTENTRY(atomic_cas_uchar)
	/ uint8_t atomic_cas_8(volatile uint8_t *target, uint8_t cmp, uint8_t new)
	/ If *target == cmp, store new; always return the old value of *target.
	movl	4(%esp), %edx	/ %edx = target address
	movzbl	8(%esp), %eax	/ %al = comparison value
	movb	12(%esp), %cl	/ %cl = new value
	lock
	cmpxchgb %cl, (%edx)	/ %al = old value (loaded on failure)
	ret
	SET_SIZE(atomic_cas_uchar)
	SET_SIZE(atomic_cas_8)
 569 
	ENTRY(atomic_cas_16)
	ALTENTRY(atomic_cas_ushort)
	/ uint16_t atomic_cas_16(volatile uint16_t *target, uint16_t cmp, uint16_t new)
	/ If *target == cmp, store new; always return the old value of *target.
	movl	4(%esp), %edx	/ %edx = target address
	movzwl	8(%esp), %eax	/ %ax = comparison value
	movw	12(%esp), %cx	/ %cx = new value
	lock
	cmpxchgw %cx, (%edx)	/ %ax = old value (loaded on failure)
	ret
	SET_SIZE(atomic_cas_ushort)
	SET_SIZE(atomic_cas_16)
 580 
	ENTRY(atomic_cas_32)
	ALTENTRY(atomic_cas_uint)
	ALTENTRY(atomic_cas_ulong)
	ALTENTRY(atomic_cas_ptr)
	/ uint32_t atomic_cas_32(volatile uint32_t *target, uint32_t cmp, uint32_t new)
	/ If *target == cmp, store new; always return the old value of *target.
	/ uint, ulong, and pointers are all 32 bits on i386, hence the aliases.
	movl	4(%esp), %edx	/ %edx = target address
	movl	8(%esp), %eax	/ %eax = comparison value
	movl	12(%esp), %ecx	/ %ecx = new value
	lock
	cmpxchgl %ecx, (%edx)	/ %eax = old value (loaded on failure)
	ret
	SET_SIZE(atomic_cas_ptr)
	SET_SIZE(atomic_cas_ulong)
	SET_SIZE(atomic_cas_uint)
	SET_SIZE(atomic_cas_32)
 595 
	ENTRY(atomic_cas_64)
	/ uint64_t atomic_cas_64(volatile uint64_t *target, uint64_t cmp, uint64_t new)
	/ If *target == cmp, store new; always return the old value in %edx:%eax.
	pushl	%ebx
	pushl	%esi
	movl	12(%esp), %esi	/ %esi = target address (args shifted by 2 pushes)
	movl	16(%esp), %eax
	movl	20(%esp), %edx	/ %edx:%eax = comparison value
	movl	24(%esp), %ebx
	movl	28(%esp), %ecx	/ %ecx:%ebx = new value
	lock
	cmpxchg8b (%esi)	/ %edx:%eax = old value (loaded on failure)
	popl	%esi
	popl	%ebx
	ret
	SET_SIZE(atomic_cas_64)
 610 
	ENTRY(atomic_swap_8)
	ALTENTRY(atomic_swap_uchar)
	/ uint8_t atomic_swap_8(volatile uint8_t *target, uint8_t new)
	/ Atomically store new into *target; return the previous value.
	/ (xchg with a memory operand is implicitly locked; the explicit
	/ lock prefix is redundant but harmless.)
	movl	4(%esp), %edx	/ %edx = target address
	movzbl	8(%esp), %eax	/ %al = new value
	lock
	xchgb	%al, (%edx)	/ %al = old value
	ret
	SET_SIZE(atomic_swap_uchar)
	SET_SIZE(atomic_swap_8)
 620 
	ENTRY(atomic_swap_16)
	ALTENTRY(atomic_swap_ushort)
	/ uint16_t atomic_swap_16(volatile uint16_t *target, uint16_t new)
	/ Atomically store new into *target; return the previous value.
	movl	4(%esp), %edx	/ %edx = target address
	movzwl	8(%esp), %eax	/ %ax = new value
	lock
	xchgw	%ax, (%edx)	/ %ax = old value
	ret
	SET_SIZE(atomic_swap_ushort)
	SET_SIZE(atomic_swap_16)
 630 
	ENTRY(atomic_swap_32)
	ALTENTRY(atomic_swap_uint)
	ALTENTRY(atomic_swap_ptr)
	ALTENTRY(atomic_swap_ulong)
	/ uint32_t atomic_swap_32(volatile uint32_t *target, uint32_t new)
	/ Atomically store new into *target; return the previous value.
	/ uint, ulong, and pointers are all 32 bits on i386, hence the aliases.
	movl	4(%esp), %edx	/ %edx = target address
	movl	8(%esp), %eax	/ %eax = new value
	lock
	xchgl	%eax, (%edx)	/ %eax = old value
	ret
	SET_SIZE(atomic_swap_ulong)
	SET_SIZE(atomic_swap_ptr)
	SET_SIZE(atomic_swap_uint)
	SET_SIZE(atomic_swap_32)
 644 
	ENTRY(atomic_swap_64)
	/ uint64_t atomic_swap_64(volatile uint64_t *target, uint64_t new)
	/ 64-bit atomic swap via a cmpxchg8b loop (no 8-byte xchg on i386);
	/ returns the previous value in %edx:%eax.
	pushl	%esi
	pushl	%ebx
	movl	12(%esp), %esi	/ %esi = target address (args shifted by 2 pushes)
	movl	16(%esp), %ebx
	movl	20(%esp), %ecx	/ %ecx:%ebx = new value
	movl	(%esi), %eax
	movl	4(%esi), %edx	/ %edx:%eax = old value
1:
	lock
	cmpxchg8b (%esi)	/ install new value if *target still == %edx:%eax
	jne	1b		/ failed: %edx:%eax = current value, retry
	popl	%ebx
	popl	%esi
	ret
	SET_SIZE(atomic_swap_64)
 661 
	ENTRY(atomic_set_long_excl)
	/ int atomic_set_long_excl(volatile ulong_t *target, uint_t value)
	/ Atomically set bit "value" in *target.  Returns 0 if the bit was
	/ previously clear (we won it), -1 if it was already set.
	movl	4(%esp), %edx	/ %edx = target address
	movl	8(%esp), %ecx	/ %ecx = bit id
	xorl	%eax, %eax	/ assume success: return 0
	lock
	btsl	%ecx, (%edx)	/ CF = previous state of the bit
	jnc	1f		/ bit was clear: success
	decl	%eax		/ return -1
1:
	ret
	SET_SIZE(atomic_set_long_excl)
 673 
	ENTRY(atomic_clear_long_excl)
	/ int atomic_clear_long_excl(volatile ulong_t *target, uint_t value)
	/ Atomically clear bit "value" in *target.  Returns 0 if the bit was
	/ previously set (we cleared it), -1 if it was already clear.
	movl	4(%esp), %edx	/ %edx = target address
	movl	8(%esp), %ecx	/ %ecx = bit id
	xorl	%eax, %eax	/ assume success: return 0
	lock
	btrl	%ecx, (%edx)	/ CF = previous state of the bit
	jc	1f		/ bit was set: success
	decl	%eax		/ return -1
1:
	ret
	SET_SIZE(atomic_clear_long_excl)
 685 
 686 #if !defined(_KERNEL)
 687 
 688         /*
 689          * NOTE: membar_enter, membar_exit, membar_producer, and 
 690          * membar_consumer are all identical routines. We define them
 691          * separately, instead of using ALTENTRY definitions to alias them
 692          * together, so that DTrace and debuggers will see a unique address
 693          * for them, allowing more accurate tracing.
 694         */
 695 
 696 
	ENTRY(membar_enter)
	/ Full memory barrier: a locked read-modify-write of the top of the
	/ stack serializes all earlier loads and stores against later ones.
	/ The xor with 0 leaves the stack contents unchanged.
	lock
	xorl	$0, (%esp)
	ret
	SET_SIZE(membar_enter)
 702 
	ENTRY(membar_exit)
	/ Full memory barrier via a locked no-op on the stack; identical to
	/ membar_enter but kept separate for distinct symbol addresses.
	lock
	xorl	$0, (%esp)
	ret
	SET_SIZE(membar_exit)
 708 
	ENTRY(membar_producer)
	/ Store barrier; implemented as a full barrier (locked no-op on the
	/ stack), which is stronger than required and therefore correct.
	lock
	xorl	$0, (%esp)
	ret
	SET_SIZE(membar_producer)
 714 
	ENTRY(membar_consumer)
	/ Load barrier; implemented as a full barrier (locked no-op on the
	/ stack), which is stronger than required and therefore correct.
	lock
	xorl	$0, (%esp)
	ret
	SET_SIZE(membar_consumer)
 720 
 721 #endif  /* !_KERNEL */