Searched refs:irq_status (Results 1 – 7 of 7) sorted by relevance
16  d->irq_status = DMA_DONE | DMA_ERR;  in _mdma_memcpy()
22  d->irq_status = DMA_DONE | DMA_ERR;  in _mdma_memcpy()
28  s->irq_status = DMA_DONE | DMA_ERR;  in _mdma_memcpy()
34  while (!(d->irq_status & DMA_DONE))  in _mdma_memcpy()
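Taken together, the _mdma_memcpy() hits trace a common MDMA test pattern: clear the write-1-to-clear irq_status bits on both channels, start the transfer, then poll the destination channel for DMA_DONE. A minimal sketch of that pattern follows; the bit values, struct layout, and the mdma_wait_done() helper are illustrative assumptions, not the testsuite's actual definitions.

#include <stdint.h>

/* Assumed DMA_IRQ_STATUS bit layout (matches common Blackfin headers,
   but treat these values as illustrative). */
#define DMA_DONE 0x1
#define DMA_ERR  0x2

/* Hypothetical MMR view of one MDMA channel. */
struct dma_chan {
	volatile uint16_t irq_status;
	/* ... remaining channel MMRs elided ... */
};

static void mdma_wait_done(struct dma_chan *s, struct dma_chan *d)
{
	/* irq_status is write-1-to-clear: writing DMA_DONE | DMA_ERR
	   clears any status latched by a previous transfer. */
	s->irq_status = DMA_DONE | DMA_ERR;
	d->irq_status = DMA_DONE | DMA_ERR;

	/* ... program descriptors and start the transfer here ... */

	/* Spin until the destination channel signals completion. */
	while (!(d->irq_status & DMA_DONE))
		continue;
}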
22 bu16 BFIN_MMR_16 (irq_status);
61  bu16 BFIN_MMR_16 (irq_status);
88  return (dma->irq_status & DMA_RUN);  in bfin_dma_running()
122 dma->irq_status |= DMA_ERR;  in bfin_dma_process_desc()
214 dma->irq_status = (dma->irq_status & ~DMA_RUN) | DMA_DONE;  in bfin_dma_finish_x()
307 dma->irq_status |= DMA_ERR;  in bfin_dma_hw_event_callback()
383 dma->irq_status |= DMA_RUN;  in bfin_dma_io_write_buffer()
391 dma->irq_status &= ~DMA_RUN;  in bfin_dma_io_write_buffer()
395 case mmr_offset(irq_status):  in bfin_dma_io_write_buffer()
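The bfin_dma_io_write_buffer() hits (lines 383, 391, 395) are where the device model updates irq_status in response to MMR writes. Below is a hedged sketch of how such a write handler can implement write-1-to-clear semantics for the sticky DMA_DONE/DMA_ERR bits while masking off the read-only DMA_RUN bit; it is an illustration of the register's behavior, not the simulator's actual handler.

#include <stdint.h>

#define DMA_DONE 0x1	/* assumed bit values, as above */
#define DMA_ERR  0x2
#define DMA_RUN  0x8

struct bfin_dma_model {
	uint16_t irq_status;	/* modeled DMA_IRQ_STATUS register */
};

/* Writing 1 to a sticky status bit clears it; DMA_RUN is read-only
   hardware state, so guest writes cannot change it. */
static void dma_write_irq_status(struct bfin_dma_model *dma, uint16_t value)
{
	dma->irq_status &= ~(value & (DMA_DONE | DMA_ERR));
}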
1102 struct physdev_irq_status_query irq_status;  in hypervisor_prime_pirq_event() (local)
1103 irq_status.irq = pirq;  in hypervisor_prime_pirq_event()
1104 if (HYPERVISOR_physdev_op(PHYSDEVOP_irq_status_query, &irq_status) < 0)  in hypervisor_prime_pirq_event()
1106 if (irq_status.flags & XENIRQSTAT_needs_eoi) {  in hypervisor_prime_pirq_event()
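The hypervisor_prime_pirq_event() hits show the standard Xen pirq probe: fill in a struct physdev_irq_status_query, issue PHYSDEVOP_irq_status_query, and record whether the pirq needs an explicit EOI. The sketch below assumes Xen's public physdev interface and the OS's hypercall wrapper; the MAX_PIRQ sizing and the pirq_needs_eoi bitmap are hypothetical bookkeeping, not the driver's own.

/* Header paths are OS-specific; Xen's public physdev.h and the
   HYPERVISOR_physdev_op() wrapper live in different places on
   NetBSD and Linux. */
#include <xen/interface/physdev.h>

#define MAX_PIRQ 256	/* illustrative sizing, not from the source */
static unsigned char pirq_needs_eoi[MAX_PIRQ / 8];

static int query_pirq_eoi(int pirq)
{
	struct physdev_irq_status_query irq_status;

	irq_status.irq = pirq;
	irq_status.flags = 0;
	if (HYPERVISOR_physdev_op(PHYSDEVOP_irq_status_query, &irq_status) < 0)
		return -1;	/* older hypervisor: query not supported */

	/* Remember pirqs that require an explicit EOI after handling. */
	if (irq_status.flags & XENIRQSTAT_needs_eoi)
		pirq_needs_eoi[pirq / 8] |= 1 << (pirq % 8);
	return 0;
}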