Searched refs:rxdma (Results 1 – 9 of 9) sorted by relevance

/openbsd/sys/dev/pci/
if_igc.c
433 if (igc_dma_malloc(sc, rsize, &rxr->rxdma)) { in igc_allocate_queues()
455 igc_dma_free(sc, &rxr->rxdma); in igc_allocate_queues()
1254 bus_dmamap_sync(rxr->rxdma.dma_tag, rxr->rxdma.dma_map, 0, in igc_rxfill()
1270 bus_dmamap_sync(rxr->rxdma.dma_tag, rxr->rxdma.dma_map, 0, in igc_rxfill()
1321 bus_dmamap_sync(rxr->rxdma.dma_tag, rxr->rxdma.dma_map, in igc_rxeof()
1328 bus_dmamap_sync(rxr->rxdma.dma_tag, rxr->rxdma.dma_map, in igc_rxeof()
1416 bus_dmamap_sync(rxr->rxdma.dma_tag, rxr->rxdma.dma_map, in igc_rxeof()
2161 error = bus_dmamap_create(rxr->rxdma.dma_tag, in igc_allocate_receive_buffers()
2170 bus_dmamap_sync(rxr->rxdma.dma_tag, rxr->rxdma.dma_map, 0, in igc_allocate_receive_buffers()
2171 rxr->rxdma.dma_map->dm_mapsize, in igc_allocate_receive_buffers()
[all …]
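
The if_igc.c hits show the two halves of the pattern: a per-ring descriptor area ("rxdma") set up once by a dma_malloc-style helper, and bus_dmamap_sync() calls bracketing every later CPU access to it. Below is a minimal sketch of that allocation step using only documented bus_dma(9) calls; the example_* struct and function names, and the single-segment/PAGE_SIZE-alignment choices, are assumptions for illustration, not igc's actual code. The example_rx_ring/example_rx_buf types are reused by the later sketches.

#include <sys/param.h>
#include <sys/systm.h>
#include <sys/mbuf.h>
#include <machine/bus.h>

/* Illustrative glue record, modelled on the *_dma_alloc "rxdma" members. */
struct example_dma_alloc {
        bus_dma_tag_t dma_tag;
        bus_dmamap_t dma_map;           /* handle used for load/sync */
        bus_dma_segment_t dma_seg;      /* physical segment of the ring */
        caddr_t dma_vaddr;              /* kernel VA of the descriptor array */
        int dma_nseg;
};

/* Illustrative per-ring state, reused by the later sketches. */
struct example_rx_buf {
        struct mbuf *buf;               /* cluster currently in this slot */
        bus_dmamap_t map;               /* per-slot map for that cluster */
};

struct example_rx_ring {
        struct example_dma_alloc rxdma; /* descriptor area glue */
        struct example_rx_buf *rx_buffers; /* one per descriptor slot */
        int ndescs;
        int next_to_check;
};

/*
 * Allocate, map and load one physically contiguous descriptor area,
 * in the spirit of the igc_dma_malloc()/igc_dma_free() calls above.
 */
int
example_dma_malloc(bus_dma_tag_t t, bus_size_t size,
    struct example_dma_alloc *dma)
{
        int rseg;

        dma->dma_tag = t;
        if (bus_dmamap_create(t, size, 1, size, 0, BUS_DMA_NOWAIT,
            &dma->dma_map) != 0)
                return (1);
        if (bus_dmamem_alloc(t, size, PAGE_SIZE, 0, &dma->dma_seg, 1,
            &rseg, BUS_DMA_NOWAIT | BUS_DMA_ZERO) != 0)
                goto destroy;
        if (bus_dmamem_map(t, &dma->dma_seg, rseg, size, &dma->dma_vaddr,
            BUS_DMA_NOWAIT) != 0)
                goto free;
        if (bus_dmamap_load(t, dma->dma_map, dma->dma_vaddr, size, NULL,
            BUS_DMA_NOWAIT) != 0)
                goto unmap;
        dma->dma_nseg = rseg;
        return (0);

unmap:
        bus_dmamem_unmap(t, dma->dma_vaddr, size);
free:
        bus_dmamem_free(t, &dma->dma_seg, rseg);
destroy:
        bus_dmamap_destroy(t, dma->dma_map);
        return (1);
}

Teardown runs the same calls in reverse (unload, unmap, free, destroy), which is roughly what the igc_dma_free() call above undoes.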

if_ix.c
2205 &rxr->rxdma, BUS_DMA_NOWAIT)) { in ixgbe_allocate_queues()
2230 ixgbe_dma_free(sc, &rxr->rxdma); in ixgbe_allocate_queues()
2760 bus_dmamap_sync(rxr->rxdma.dma_tag, rxr->rxdma.dma_map, 0, in ixgbe_allocate_receive_buffers()
2761 rxr->rxdma.dma_map->dm_mapsize, in ixgbe_allocate_receive_buffers()
2815 bus_dmamap_sync(rxr->rxdma.dma_tag, rxr->rxdma.dma_map, in ixgbe_rxfill()
2816 0, rxr->rxdma.dma_map->dm_mapsize, in ixgbe_rxfill()
2832 bus_dmamap_sync(rxr->rxdma.dma_tag, rxr->rxdma.dma_map, in ixgbe_rxfill()
2833 0, rxr->rxdma.dma_map->dm_mapsize, in ixgbe_rxfill()
3142 bus_dmamap_sync(rxr->rxdma.dma_tag, rxr->rxdma.dma_map, in ixgbe_rxeof()
3148 bus_dmamap_sync(rxr->rxdma.dma_tag, rxr->rxdma.dma_map, in ixgbe_rxeof()
[all …]
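
The paired bus_dmamap_sync() calls inside ixgbe_rxfill() bracket the refill step: settle outstanding access to the ring before the CPU rewrites empty slots, then push the updated descriptors back out for the device. A hedged sketch of that shape, reusing the example_* types from the first sketch; example_get_buf() is a hypothetical helper that maps a fresh mbuf cluster into a slot, and the exact sync flags the driver uses may differ.

int example_get_buf(struct example_rx_ring *, int);      /* hypothetical */

static int
example_rxfill(struct example_rx_ring *rxr)
{
        int i, post = 0;

        /* Complete earlier CPU writes to the ring before modifying it. */
        bus_dmamap_sync(rxr->rxdma.dma_tag, rxr->rxdma.dma_map, 0,
            rxr->rxdma.dma_map->dm_mapsize, BUS_DMASYNC_POSTWRITE);

        for (i = 0; i < rxr->ndescs; i++) {
                if (rxr->rx_buffers[i].buf != NULL)
                        continue;               /* slot already has a cluster */
                if (example_get_buf(rxr, i) != 0) /* map a fresh mbuf cluster */
                        break;
                post = 1;
        }

        /* Make the rewritten descriptors visible to the device. */
        bus_dmamap_sync(rxr->rxdma.dma_tag, rxr->rxdma.dma_map, 0,
            rxr->rxdma.dma_map->dm_mapsize, BUS_DMASYNC_PREWRITE);

        return (post);  /* caller kicks the tail register if anything was posted */
}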

if_ixgb.c
222 if (ixgb_dma_malloc(sc, rsize, &sc->rxdma, BUS_DMA_NOWAIT)) { in ixgb_attach()
227 sc->rx_desc_base = (struct ixgb_rx_desc *) sc->rxdma.dma_vaddr; in ixgb_attach()
250 ixgb_dma_free(sc, &sc->rxdma); in ixgb_attach()
1525 bus_dmamap_sync(sc->rxdma.dma_tag, sc->rxdma.dma_map, 0, in ixgb_allocate_receive_structures()
1526 sc->rxdma.dma_map->dm_mapsize, in ixgb_allocate_receive_structures()
1581 bus_addr = sc->rxdma.dma_map->dm_segs[0].ds_addr; in ixgb_initialize_receive_unit()
1721 bus_dmamap_sync(sc->rxdma.dma_tag, sc->rxdma.dma_map, 0, in ixgb_rxeof()
1722 sc->rxdma.dma_map->dm_mapsize, BUS_DMASYNC_POSTREAD); in ixgb_rxeof()
1794 bus_dmamap_sync(sc->rxdma.dma_tag, sc->rxdma.dma_map, 0, in ixgb_rxeof()
1795 sc->rxdma.dma_map->dm_mapsize, in ixgb_rxeof()
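
ixgb keeps a single softc-wide rxdma area rather than one per ring, but the completion-side bracket in ixgb_rxeof() is the same idea: sync POSTREAD before the CPU reads descriptor status bits, process finished slots, then sync again when handing the ring back. A sketch under the same illustrative types; the descriptor layout, the DD status bit and example_input_packet() are assumptions, not ixgb's definitions.

/* Illustrative descriptor with a "descriptor done" status bit. */
struct example_rx_desc {
        uint64_t buffer_addr;
        uint16_t length;
        uint8_t status;
};
#define EXAMPLE_RX_DESC_STAT_DD 0x01            /* hypothetical DD bit */

void example_input_packet(struct example_rx_ring *, int); /* hypothetical */

static void
example_rxeof(struct example_rx_ring *rxr)
{
        struct example_rx_desc *ring, *cur;
        int i = rxr->next_to_check;

        ring = (struct example_rx_desc *)rxr->rxdma.dma_vaddr;

        /* Pull the device's descriptor writes into CPU view first. */
        bus_dmamap_sync(rxr->rxdma.dma_tag, rxr->rxdma.dma_map, 0,
            rxr->rxdma.dma_map->dm_mapsize, BUS_DMASYNC_POSTREAD);

        for (cur = &ring[i]; cur->status & EXAMPLE_RX_DESC_STAT_DD;
            cur = &ring[i]) {
                example_input_packet(rxr, i);   /* hand the mbuf to the stack */
                cur->status = 0;                /* slot belongs to us again */
                if (++i == rxr->ndescs)
                        i = 0;
        }
        rxr->next_to_check = i;

        /* Give the consumed slots back for further device writes. */
        bus_dmamap_sync(rxr->rxdma.dma_tag, rxr->rxdma.dma_map, 0,
            rxr->rxdma.dma_map->dm_mapsize, BUS_DMASYNC_PREREAD);
}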

if_ngbe.c
641 bus_dmamap_sync(rxr->rxdma.dma_tag, rxr->rxdma.dma_map, 0, in ngbe_rxfill()
657 bus_dmamap_sync(rxr->rxdma.dma_tag, rxr->rxdma.dma_map, 0, in ngbe_rxfill()
1220 if (ngbe_dma_malloc(sc, rsize, &rxr->rxdma)) { in ngbe_allocate_queues()
1242 ngbe_dma_free(sc, &rxr->rxdma); in ngbe_allocate_queues()
1283 bus_dmamap_unload(rxr->rxdma.dma_tag, in ngbe_free_receive_buffers()
1360 error = bus_dmamap_create(rxr->rxdma.dma_tag, in ngbe_allocate_receive_buffers()
1369 bus_dmamap_sync(rxr->rxdma.dma_tag, rxr->rxdma.dma_map, 0, in ngbe_allocate_receive_buffers()
1370 rxr->rxdma.dma_map->dm_mapsize, in ngbe_allocate_receive_buffers()
4290 bus_dmamap_sync(rxr->rxdma.dma_tag, rxr->rxdma.dma_map, in ngbe_rxeof()
4297 bus_dmamap_sync(rxr->rxdma.dma_tag, rxr->rxdma.dma_map, in ngbe_rxeof()
[all …]
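
The ngbe hits add the buffer side of the story: besides the single rxdma map covering the descriptor ring, the driver keeps one DMA map per descriptor slot for the mbuf clusters, created up front in its allocate_receive_buffers path and unloaded/destroyed again in free_receive_buffers. A sketch of that split, continuing with the example_* types and includes from the first sketch; sizing each map for one MCLBYTES cluster is an assumption.

static int
example_allocate_receive_buffers(struct example_rx_ring *rxr)
{
        struct example_rx_buf *rxbuf;
        int i, error;

        for (i = 0; i < rxr->ndescs; i++) {
                rxbuf = &rxr->rx_buffers[i];
                /* One single-segment map per slot, sized for a cluster. */
                error = bus_dmamap_create(rxr->rxdma.dma_tag, MCLBYTES, 1,
                    MCLBYTES, 0, BUS_DMA_NOWAIT, &rxbuf->map);
                if (error != 0)
                        return (error);
        }

        /* Publish the (still empty) descriptor area to the device. */
        bus_dmamap_sync(rxr->rxdma.dma_tag, rxr->rxdma.dma_map, 0,
            rxr->rxdma.dma_map->dm_mapsize,
            BUS_DMASYNC_PREREAD | BUS_DMASYNC_PREWRITE);

        return (0);
}

static void
example_free_receive_buffers(struct example_rx_ring *rxr)
{
        struct example_rx_buf *rxbuf;
        int i;

        for (i = 0; i < rxr->ndescs; i++) {
                rxbuf = &rxr->rx_buffers[i];
                if (rxbuf->buf != NULL) {
                        bus_dmamap_unload(rxr->rxdma.dma_tag, rxbuf->map);
                        m_freem(rxbuf->buf);
                        rxbuf->buf = NULL;
                }
                bus_dmamap_destroy(rxr->rxdma.dma_tag, rxbuf->map);
        }
}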

if_ixgb.h
279 struct ixgb_dma_alloc rxdma; /* bus_dma glue for rx desc */ member

if_ix.h
200 struct ixgbe_dma_alloc rxdma; member

if_igc.h
282 struct igc_dma_alloc rxdma; member

if_ngbereg.h
1018 struct ngbe_dma_alloc rxdma; member

if_ixv.c
970 rdba = rxr->rxdma.dma_map->dm_segs[0].ds_addr; in ixv_initialize_receive_units()
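
The lone if_ixv.c hit is where the loaded map finally meets the hardware: the physical address of the ring's only segment, dm_segs[0].ds_addr, becomes the receive descriptor base programmed into the NIC. The sketch below shows that step with placeholder register offsets (EXAMPLE_RDBAL/RDBAH/RDLEN) and a placeholder softc; the real drivers use their own register maps and write macros.

/* Placeholder register offsets; not the ixgbe/ixv register layout. */
#define EXAMPLE_RDBAL(q)        (0x1000 + (q) * 0x40)
#define EXAMPLE_RDBAH(q)        (0x1004 + (q) * 0x40)
#define EXAMPLE_RDLEN(q)        (0x1008 + (q) * 0x40)

struct example_softc {
        bus_space_tag_t sc_memt;        /* register space tag */
        bus_space_handle_t sc_memh;     /* register space handle */
        struct example_rx_ring *rx_rings;
        int num_queues;
};

static inline void
example_write_reg(struct example_softc *sc, bus_size_t reg, uint32_t val)
{
        bus_space_write_4(sc->sc_memt, sc->sc_memh, reg, val);
}

static void
example_initialize_receive_units(struct example_softc *sc)
{
        struct example_rx_ring *rxr = sc->rx_rings;
        uint64_t rdba;
        int i;

        for (i = 0; i < sc->num_queues; i++, rxr++) {
                /* Physical base of this ring's descriptor area. */
                rdba = rxr->rxdma.dma_map->dm_segs[0].ds_addr;
                example_write_reg(sc, EXAMPLE_RDBAL(i), (uint32_t)rdba);
                example_write_reg(sc, EXAMPLE_RDBAH(i), (uint32_t)(rdba >> 32));
                example_write_reg(sc, EXAMPLE_RDLEN(i),
                    rxr->ndescs * sizeof(struct example_rx_desc));
        }
}

From here the flow is the one the other hits trace: fill the ring, sync, and let the rxeof path drain it.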