5042 stop using deprecated atomic functions

Old version of hxge_start(), still calling the deprecated cas32():
 202 
 203         HXGE_DEBUG_MSG((hxgep, TX_CTL,
 204             "==> hxge_start: dump packets (IP ORIGINAL b_rptr $%p): %s",
 205             mp->b_rptr, hxge_dump_packet((char *)mp->b_rptr, dump_len)));
 206 #endif
 207 
 208         tdc_stats = tx_ring_p->tdc_stats;
 209         mark_mode = (tx_ring_p->descs_pending &&
 210             ((tx_ring_p->tx_ring_size - tx_ring_p->descs_pending) <
 211             hxge_tx_minfree));
 212 
 213         HXGE_DEBUG_MSG((hxgep, TX_CTL,
 214             "TX Descriptor ring is channel %d mark mode %d",
 215             tx_ring_p->tdc, mark_mode));
 216 
 217         if (!hxge_txdma_reclaim(hxgep, tx_ring_p, hxge_tx_minfree)) {
 218                 HXGE_DEBUG_MSG((hxgep, TX_CTL,
 219                     "TX Descriptor ring is full: channel %d", tx_ring_p->tdc));
 220                 HXGE_DEBUG_MSG((hxgep, TX_CTL,
 221                     "TX Descriptor ring is full: channel %d", tx_ring_p->tdc));
 222                 (void) cas32((uint32_t *)&tx_ring_p->queueing, 0, 1);
 223                 tdc_stats->tx_no_desc++;
 224                 MUTEX_EXIT(&tx_ring_p->lock);
 225                 status = 1;
 226                 goto hxge_start_fail1;
 227         }
 228 
 229         nmp = mp;
 230         i = sop_index = tx_ring_p->wr_index;
 231         nmblks = 0;
 232         ngathers = 0;
 233         pkt_len = 0;
 234         pack_len = 0;
 235         clen = 0;
 236         last_bidx = -1;
 237         good_packet = B_TRUE;
 238 
 239         desc_area = tx_ring_p->tdc_desc;
 240         hpi_handle = desc_area.hpi_handle;
 241         hpi_desc_handle.regh = (hxge_os_acc_handle_t)
 242             DMA_COMMON_ACC_HANDLE(desc_area);
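Only the call at line 222 changes in this excerpt. Before the updated version below, a minimal sketch of the compare-and-swap semantics that cas32() and its replacement atomic_cas_32() share; the flag variable and function name here are stand-ins for illustration, not the driver's tx_ring_t field:

#include <sys/atomic.h>

static volatile uint32_t queueing_flag;	/* stand-in for tx_ring_p->queueing */

static void
mark_ring_queueing(void)
{
	/*
	 * atomic_cas_32(target, cmp, newval) writes newval to *target only
	 * when *target equals cmp, and returns the value found there.
	 * The deprecated cas32() behaved the same way; this change simply
	 * switches callers to the atomic_cas_32() spelling.
	 */
	(void) atomic_cas_32(&queueing_flag, 0, 1);
}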

New version of the same lines, with the cas32() call at line 222 replaced by atomic_cas_32():
 202 
 203         HXGE_DEBUG_MSG((hxgep, TX_CTL,
 204             "==> hxge_start: dump packets (IP ORIGINAL b_rptr $%p): %s",
 205             mp->b_rptr, hxge_dump_packet((char *)mp->b_rptr, dump_len)));
 206 #endif
 207 
 208         tdc_stats = tx_ring_p->tdc_stats;
 209         mark_mode = (tx_ring_p->descs_pending &&
 210             ((tx_ring_p->tx_ring_size - tx_ring_p->descs_pending) <
 211             hxge_tx_minfree));
 212 
 213         HXGE_DEBUG_MSG((hxgep, TX_CTL,
 214             "TX Descriptor ring is channel %d mark mode %d",
 215             tx_ring_p->tdc, mark_mode));
 216 
 217         if (!hxge_txdma_reclaim(hxgep, tx_ring_p, hxge_tx_minfree)) {
 218                 HXGE_DEBUG_MSG((hxgep, TX_CTL,
 219                     "TX Descriptor ring is full: channel %d", tx_ring_p->tdc));
 220                 HXGE_DEBUG_MSG((hxgep, TX_CTL,
 221                     "TX Descriptor ring is full: channel %d", tx_ring_p->tdc));
 222                 (void) atomic_cas_32((uint32_t *)&tx_ring_p->queueing, 0, 1);
 223                 tdc_stats->tx_no_desc++;
 224                 MUTEX_EXIT(&tx_ring_p->lock);
 225                 status = 1;
 226                 goto hxge_start_fail1;
 227         }
 228 
 229         nmp = mp;
 230         i = sop_index = tx_ring_p->wr_index;
 231         nmblks = 0;
 232         ngathers = 0;
 233         pkt_len = 0;
 234         pack_len = 0;
 235         clen = 0;
 236         last_bidx = -1;
 237         good_packet = B_TRUE;
 238 
 239         desc_area = tx_ring_p->tdc_desc;
 240         hpi_handle = desc_area.hpi_handle;
 241         hpi_desc_handle.regh = (hxge_os_acc_handle_t)
 242             DMA_COMMON_ACC_HANDLE(desc_area);
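
Aside from the atomic call, the other moving part in this excerpt is the hxge_tx_minfree watermark: mark_mode is set when descriptors are pending and the free slots have dropped below it, and hxge_txdma_reclaim() is called with the same threshold, with the send bailing out (queueing flag set, tx_no_desc bumped) when it cannot make room. A hypothetical helper, names illustrative and not from the driver, that mirrors the mark_mode test:

#include <sys/types.h>

/*
 * Mirrors the mark_mode expression above: the ring counts as low on
 * descriptors when some are still pending and the number of free slots
 * has fallen below the minfree watermark.
 */
static boolean_t
tx_ring_is_low(uint32_t ring_size, uint32_t descs_pending, uint32_t minfree)
{
	return ((descs_pending != 0 &&
	    (ring_size - descs_pending) < minfree) ? B_TRUE : B_FALSE);
}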