first pass

          --- old/usr/src/common/crypto/aes/aes_impl.c
          +++ new/usr/src/common/crypto/aes/aes_impl.c
(79 lines elided)
  80   80   * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHORS OR CONTRIBUTORS BE
  81   81   * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
  82   82   * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
  83   83   * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
  84   84   * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
  85   85   * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
  86   86   * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
  87   87   * EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
  88   88   */
  89   89  
  90      -/* EXPORT DELETE START */
  91      -
  92   90  #if defined(sun4u)
  93   91  /* External assembly functions: */
  94   92  extern void aes_encrypt_impl(const uint32_t rk[], int Nr, const uint32_t pt[4],
  95   93          uint32_t ct[4]);
  96   94  extern void aes_decrypt_impl(const uint32_t rk[], int Nr, const uint32_t ct[4],
  97   95          uint32_t pt[4]);
  98   96  
  99   97  #define AES_ENCRYPT_IMPL(a, b, c, d, e) aes_encrypt_impl(a, b, c, d)
 100   98  #define AES_DECRYPT_IMPL(a, b, c, d, e) aes_decrypt_impl(a, b, c, d)
 101   99  
(1447 lines elided)
1549 1547          pt[2] = s2;
1550 1548  
1551 1549          s3 = (Td4[t3 >> 24] & 0xff000000) ^
1552 1550              (Td4[(t2 >> 16) & 0xff] & 0x00ff0000) ^
1553 1551              (Td4[(t1 >>  8) & 0xff] & 0x0000ff00) ^
1554 1552              (Td4[t0 & 0xff] & 0x000000ff) ^
1555 1553              rk[3];
1556 1554          pt[3] = s3;
1557 1555  }
1558 1556  #endif  /* sun4u, __amd64 */
1559      -/* EXPORT DELETE END */
1560 1557  
1561 1558  
1562 1559  /*
1563 1560   * Initialize AES encryption and decryption key schedules.
1564 1561   *
1565 1562   * Parameters:
1566 1563   * cipherKey    User key
1567 1564   * keyBits      AES key size (128, 192, or 256 bits)
1568 1565   * keysched     AES key schedule to be initialized, of type aes_key_t.
1569 1566   *              Allocated by aes_alloc_keysched().
1570 1567   */
1571 1568  void
1572 1569  aes_init_keysched(const uint8_t *cipherKey, uint_t keyBits, void *keysched)
1573 1570  {
1574      -/* EXPORT DELETE START */
1575 1571          aes_key_t       *newbie = keysched;
1576 1572          uint_t          keysize, i, j;
1577 1573          union {
1578 1574                  uint64_t        ka64[4];
1579 1575                  uint32_t        ka32[8];
1580 1576                  } keyarr;
1581 1577  
1582 1578          switch (keyBits) {
1583 1579          case 128:
1584 1580                  newbie->nr = 10;
(32 lines elided)
1617 1613                  bcopy(cipherKey, keyarr.ka32, keysize);
1618 1614          }
1619 1615  
1620 1616  #else   /* byte swap */
1621 1617          for (i = 0, j = 0; j < keysize; i++, j += 4) {
1622 1618                  keyarr.ka32[i] = htonl(*(uint32_t *)(void *)&cipherKey[j]);
1623 1619          }
1624 1620  #endif
1625 1621  
1626 1622          aes_setupkeys(newbie, keyarr.ka32, keyBits);
1627      -/* EXPORT DELETE END */
1628 1623  }
1629 1624  
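For context, a minimal caller-side sketch of how aes_alloc_keysched() and aes_init_keysched() fit together. The wrapper name is illustrative only, and it assumes <sys/types.h> plus the prototypes above are in scope:

        /* Illustrative helper, not part of this change */
        static void *
        example_keysched_setup(const uint8_t *key, uint_t keyBits)
        {
                size_t  size;
                void    *ks;

                /* kmflag is ignored in userland; 0 is used here for illustration */
                ks = aes_alloc_keysched(&size, 0);
                if (ks == NULL)
                        return (NULL);

                /* keyBits must be 128, 192, or 256 */
                aes_init_keysched(key, keyBits, ks);
                return (ks);
        }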
1630 1625  
1631 1626  /*
1632 1627   * Encrypt one block using AES.
1633 1628   * Align if needed and (for x86 32-bit only) byte-swap.
1634 1629   *
1635 1630   * Parameters:
1636 1631   * ks   Key schedule, of type aes_key_t
1637 1632   * pt   Input block (plain text)
1638 1633   * ct   Output block (crypto text).  Can overlap with pt
1639 1634   */
1640 1635  int
1641 1636  aes_encrypt_block(const void *ks, const uint8_t *pt, uint8_t *ct)
1642 1637  {
1643      -/* EXPORT DELETE START */
1644 1638          aes_key_t       *ksch = (aes_key_t *)ks;
1645 1639  
1646 1640  #ifndef AES_BYTE_SWAP
1647 1641          if (IS_P2ALIGNED2(pt, ct, sizeof (uint32_t))) {
1648 1642                  /* LINTED:  pointer alignment */
1649 1643                  AES_ENCRYPT_IMPL(&ksch->encr_ks.ks32[0], ksch->nr,
1650 1644                      /* LINTED:  pointer alignment */
1651 1645                      (uint32_t *)pt, (uint32_t *)ct, ksch->flags);
1652 1646          } else {
1653 1647  #endif
(17 lines elided)
1671 1665  #ifndef AES_BYTE_SWAP
1672 1666                  bcopy(&buffer, ct, AES_BLOCK_LEN);
1673 1667          }
1674 1668  
1675 1669  #else   /* byte swap */
1676 1670                  *(uint32_t *)(void *)&ct[0] = htonl(buffer[0]);
1677 1671                  *(uint32_t *)(void *)&ct[4] = htonl(buffer[1]);
1678 1672                  *(uint32_t *)(void *)&ct[8] = htonl(buffer[2]);
1679 1673                  *(uint32_t *)(void *)&ct[12] = htonl(buffer[3]);
1680 1674  #endif
1681      -/* EXPORT DELETE END */
1682 1675          return (CRYPTO_SUCCESS);
1683 1676  }
1684 1677  
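A quick round-trip sketch of the two block entry points (aes_decrypt_block() follows below); the wrapper name and the bcmp() check are illustrative, and <strings.h> is assumed for bcmp():

        /* Illustrative helper, not part of this change */
        static int
        example_block_roundtrip(const void *ks, const uint8_t pt[AES_BLOCK_LEN])
        {
                uint8_t ct[AES_BLOCK_LEN];
                uint8_t out[AES_BLOCK_LEN];

                if (aes_encrypt_block(ks, pt, ct) != CRYPTO_SUCCESS)
                        return (-1);
                if (aes_decrypt_block(ks, ct, out) != CRYPTO_SUCCESS)
                        return (-1);

                /* returns 0 if decryption recovered the original plaintext */
                return (bcmp(pt, out, AES_BLOCK_LEN));
        }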
1685 1678  
1686 1679  /*
1687 1680   * Decrypt one block using AES.
1688 1681   * Align and byte-swap if needed.
1689 1682   *
1690 1683   * Parameters:
1691 1684   * ks   Key schedule, of type aes_key_t
1692 1685   * ct   Input block (crypto text)
1693 1686   * pt   Output block (plain text).  Can overlap with ct
1694 1687   */
1695 1688  int
1696 1689  aes_decrypt_block(const void *ks, const uint8_t *ct, uint8_t *pt)
1697 1690  {
1698      -/* EXPORT DELETE START */
1699 1691          aes_key_t       *ksch = (aes_key_t *)ks;
1700 1692  
1701 1693  #ifndef AES_BYTE_SWAP
1702 1694          if (IS_P2ALIGNED2(ct, pt, sizeof (uint32_t))) {
1703 1695                  /* LINTED:  pointer alignment */
1704 1696                  AES_DECRYPT_IMPL(&ksch->decr_ks.ks32[0], ksch->nr,
1705 1697                      /* LINTED:  pointer alignment */
1706 1698                      (uint32_t *)ct, (uint32_t *)pt, ksch->flags);
1707 1699          } else {
1708 1700  #endif
(18 lines elided)
1727 1719                  bcopy(&buffer, pt, AES_BLOCK_LEN);
1728 1720          }
1729 1721  
1730 1722  #else   /* byte swap */
1731 1723          *(uint32_t *)(void *)&pt[0] = htonl(buffer[0]);
1732 1724          *(uint32_t *)(void *)&pt[4] = htonl(buffer[1]);
1733 1725          *(uint32_t *)(void *)&pt[8] = htonl(buffer[2]);
1734 1726          *(uint32_t *)(void *)&pt[12] = htonl(buffer[3]);
1735 1727  #endif
1736 1728  
1737      -/* EXPORT DELETE END */
1738 1729          return (CRYPTO_SUCCESS);
1739 1730  }
1740 1731  
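Both aes_encrypt_block() and aes_decrypt_block() take the word-based fast path only when source and destination are 32-bit aligned, and otherwise fall back to an aligned local buffer that is copied out afterward. The IS_P2ALIGNED2() definition lives elsewhere in the tree; a plausible reconstruction of the test it performs (an assumption, not the real macro) is:

        /* Assumed shape only: true when both pointers are aligned to 'align' bytes */
        #define EXAMPLE_IS_P2ALIGNED2(a, b, align) \
                ((((uintptr_t)(a) | (uintptr_t)(b)) & ((uintptr_t)(align) - 1)) == 0)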
1741 1732  
1742 1733  /*
1743 1734   * Allocate key schedule for AES.
1744 1735   *
1745 1736   * Return the pointer and set size to the number of bytes allocated.
1746 1737   * Memory allocated must be freed by the caller when done.
1747 1738   *
1748 1739   * Parameters:
1749 1740   * size         Size of key schedule allocated, in bytes
1750 1741   * kmflag       Flag passed to kmem_alloc(9F); ignored in userland.
1751 1742   */
1752 1743  /* ARGSUSED */
1753 1744  void *
1754 1745  aes_alloc_keysched(size_t *size, int kmflag)
1755 1746  {
1756      -/* EXPORT DELETE START */
1757 1747          aes_key_t *keysched;
1758 1748  
1759 1749  #ifdef  _KERNEL
1760 1750          keysched = (aes_key_t *)kmem_alloc(sizeof (aes_key_t), kmflag);
1761 1751  #else   /* !_KERNEL */
1762 1752          keysched = (aes_key_t *)malloc(sizeof (aes_key_t));
1763 1753  #endif  /* _KERNEL */
1764 1754  
1765 1755          if (keysched != NULL) {
1766 1756                  *size = sizeof (aes_key_t);
1767 1757                  return (keysched);
1768 1758          }
1769      -/* EXPORT DELETE END */
1770 1759          return (NULL);
1771 1760  }
1772 1761  
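Since the allocation is the caller's to release, the matching cleanup would look something like the sketch below (illustrative wrapper name; the size is the value returned through aes_alloc_keysched()):

        /* Illustrative helper, not part of this change */
        static void
        example_keysched_free(void *ks, size_t size)
        {
        #ifdef  _KERNEL
                kmem_free(ks, size);
        #else
                free(ks);
        #endif
        }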
1773 1762  
1774 1763  #ifdef __amd64
1775 1764  /*
1776 1765   * Return 1 if executing on Intel with AES-NI instructions,
1777 1766   * otherwise 0 (i.e., Intel without AES-NI or AMD64).
1778 1767   * Cache the result, as the CPU can't change.
1779 1768   *
(22 lines elided)
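The detection body itself is elided above. As a general sketch of the technique the comment describes (probe CPUID once and cache the answer), and assuming nothing about the elided code: AES-NI support is reported in CPUID leaf 1, ECX bit 25, so a cached check could look roughly like this:

        /* Illustrative sketch only, not the elided implementation */
        static int
        example_aes_ni_present(void)
        {
                static int cached = -1;         /* -1 means not probed yet */

                if (cached == -1) {
                        uint32_t eax, ebx, ecx, edx;

                        /* CPUID leaf 1 feature flags; AES-NI is ECX bit 25 */
                        __asm__ __volatile__("cpuid"
                            : "=a" (eax), "=b" (ebx), "=c" (ecx), "=d" (edx)
                            : "a" (1), "c" (0));
                        cached = (ecx >> 25) & 1;
                }
                return (cached);
        }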