
Searched refs:rb (Results 1 – 25 of 376) sorted by relevance


/linux/kernel/events/
ring_buffer.c
42 struct perf_buffer *rb = handle->rb; in perf_output_get_handle() local
56 struct perf_buffer *rb = handle->rb; in perf_output_put_handle() local
174 rb = rcu_dereference(event->rb); in __perf_output_begin()
186 handle->rb = rb; in __perf_output_begin()
238 local_add(rb->watermark, &rb->wakeup); in __perf_output_begin()
418 handle->rb = rb; in perf_aux_output_begin()
467 if (rb->aux_head - rb->aux_wakeup >= rb->aux_watermark) { in rb_need_aux_wakeup()
468 rb->aux_wakeup = rounddown(rb->aux_head, rb->aux_watermark); in rb_need_aux_wakeup()
488 struct perf_buffer *rb = handle->rb; in perf_aux_output_end() local
545 struct perf_buffer *rb = handle->rb; in perf_aux_output_skip() local
[all …]
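
The aux_wakeup hits at lines 467-468 implement watermark-based wakeup throttling. A minimal sketch of that logic, with rounddown() spelled out (the helper name and signature here are illustrative, not the kernel's):

    /* Fire a wakeup once at least `watermark` bytes have been produced
     * since the last wakeup, then snap the wakeup point down to a
     * watermark boundary, as rb_need_aux_wakeup() does above. */
    static int need_aux_wakeup(unsigned long head, unsigned long *wakeup,
                               unsigned long watermark)
    {
            if (head - *wakeup >= watermark) {
                    *wakeup = head - (head % watermark); /* rounddown() */
                    return 1;
            }
            return 0;
    }
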
internal.h
65 struct perf_buffer *rb; in rb_free_rcu() local
68 rb_free(rb); in rb_free_rcu()
73 if (!pause && rb->nr_pages) in rb_toggle_paused()
74 rb->paused = 0; in rb_toggle_paused()
76 rb->paused = 1; in rb_toggle_paused()
90 return !!rb->aux_nr_pages; in rb_has_aux()
108 return rb->page_order; in page_order()
121 return rb->nr_pages << page_order(rb); in data_page_nr()
126 return rb->nr_pages << (PAGE_SHIFT + page_order(rb)); in perf_data_size()
131 return rb->aux_nr_pages << PAGE_SHIFT; in perf_aux_size()
[all …]
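
The size helpers hit above derive everything from nr_pages and page_order. An illustrative restatement, assuming a 4 KiB PAGE_SHIFT and a stand-in struct (both are assumptions for the sketch, not the kernel's definitions):

    #define PAGE_SHIFT 12 /* 4 KiB pages assumed for the example */

    struct perf_buffer_sketch {
            int nr_pages;     /* data pages */
            int page_order;   /* each slot covers (1 << page_order) pages */
            int aux_nr_pages; /* AUX area pages */
    };

    /* Number of real pages backing the data area. */
    static int data_page_nr(const struct perf_buffer_sketch *rb)
    {
            return rb->nr_pages << rb->page_order;
    }

    /* Data area size in bytes. */
    static unsigned long perf_data_size(const struct perf_buffer_sketch *rb)
    {
            return (unsigned long)rb->nr_pages << (PAGE_SHIFT + rb->page_order);
    }

    /* AUX area size in bytes. */
    static unsigned long perf_aux_size(const struct perf_buffer_sketch *rb)
    {
            return (unsigned long)rb->aux_nr_pages << PAGE_SHIFT;
    }
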
/linux/tools/lib/bpf/
ringbuf.c
102 tmp = libbpf_reallocarray(rb->rings, rb->ring_cnt + 1, sizeof(*rb->rings)); in ring_buffer__add()
107 tmp = libbpf_reallocarray(rb->events, rb->ring_cnt + 1, sizeof(*rb->events)); in ring_buffer__add()
115 rb->rings[rb->ring_cnt] = r; in ring_buffer__add()
152 e = &rb->events[rb->ring_cnt]; in ring_buffer__add()
180 ringbuf_free_ring(rb, rb->rings[i]); in ring_buffer__free()
199 rb = calloc(1, sizeof(*rb)); in ring_buffer__new()
341 cnt = epoll_wait(rb->epoll_fd, rb->events, rb->ring_cnt, timeout_ms); in ring_buffer__poll()
429 munmap(rb->producer_pos, rb->page_size + 2 * (rb->mask + 1)); in user_ringbuf_unmap_ring()
503 rb->data = tmp + rb->page_size; in user_ringbuf_map()
525 rb = calloc(1, sizeof(*rb)); in user_ring_buffer__new()
[all …]
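
ring_buffer__new(), ring_buffer__poll() and ring_buffer__free() hit above are libbpf's public consumer API. A minimal consumer sketch (map_fd is assumed to be the fd of a BPF_MAP_TYPE_RINGBUF map):

    #include <bpf/libbpf.h>

    static int handle_sample(void *ctx, void *data, size_t len)
    {
            /* process one record; a negative return aborts the poll */
            return 0;
    }

    static int consume(int map_fd)
    {
            struct ring_buffer *rb;
            int err;

            rb = ring_buffer__new(map_fd, handle_sample, NULL, NULL);
            if (!rb)
                    return -1;

            err = ring_buffer__poll(rb, 100 /* timeout, ms */);
            ring_buffer__free(rb);
            return err;
    }
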
/linux/drivers/scsi/bfa/
bfa_ioc_ct.c
185 void __iomem *rb; in bfa_ioc_ct_reg_init() local
188 rb = bfa_ioc_bar0(ioc); in bfa_ioc_ct_reg_init()
244 void __iomem *rb; in bfa_ioc_ct2_reg_init() local
247 rb = bfa_ioc_bar0(ioc); in bfa_ioc_ct2_reg_init()
597 writel(0, (rb + OP_MODE)); in bfa_ioc_ct_pll_init()
821 bfa_ioc_ct2_sclk_init(rb); in bfa_ioc_ct2_clk_reset()
822 bfa_ioc_ct2_lclk_init(rb); in bfa_ioc_ct2_clk_reset()
898 bfa_ioc_ct2_clk_reset(rb); in bfa_ioc_ct2_pll_init()
901 bfa_ioc_ct2_mac_reset(rb); in bfa_ioc_ct2_pll_init()
903 bfa_ioc_ct2_clk_reset(rb); in bfa_ioc_ct2_pll_init()
[all …]
bfa_ioc_cb.c
138 void __iomem *rb; in bfa_ioc_cb_reg_init() local
141 rb = bfa_ioc_bar0(ioc); in bfa_ioc_cb_reg_init()
186 ioc->ioc_regs.err_set = (rb + ERR_SET_REG); in bfa_ioc_cb_reg_init()
369 join_bits = readl(rb + BFA_IOC0_STATE_REG) & in bfa_ioc_cb_pll_init()
372 join_bits = readl(rb + BFA_IOC1_STATE_REG) & in bfa_ioc_cb_pll_init()
375 writel(0xffffffffU, (rb + HOSTFN0_INT_MSK)); in bfa_ioc_cb_pll_init()
376 writel(0xffffffffU, (rb + HOSTFN1_INT_MSK)); in bfa_ioc_cb_pll_init()
383 rb + APP_PLL_SCLK_CTL_REG); in bfa_ioc_cb_pll_init()
386 rb + APP_PLL_LCLK_CTL_REG); in bfa_ioc_cb_pll_init()
391 rb + APP_PLL_SCLK_CTL_REG); in bfa_ioc_cb_pll_init()
[all …]
/linux/drivers/net/ethernet/brocade/bna/
bfa_ioc_ct.c
251 void __iomem *rb; in bfa_ioc_ct_reg_init() local
254 rb = bfa_ioc_bar0(ioc); in bfa_ioc_ct_reg_init()
310 void __iomem *rb; in bfa_ioc_ct2_reg_init() local
313 rb = bfa_ioc_bar0(ioc); in bfa_ioc_ct2_reg_init()
616 writel(0, (rb + OP_MODE)); in bfa_ioc_ct_pll_init()
620 (rb + ETH_MAC_SER_REG)); in bfa_ioc_ct_pll_init()
624 (rb + ETH_MAC_SER_REG)); in bfa_ioc_ct_pll_init()
789 bfa_ioc_ct2_sclk_init(rb); in bfa_ioc_ct2_mac_reset()
790 bfa_ioc_ct2_lclk_init(rb); in bfa_ioc_ct2_mac_reset()
888 bfa_ioc_ct2_mac_reset(rb); in bfa_ioc_ct2_pll_init()
[all …]
/linux/fs/xfs/scrub/
bmap_repair.c
97 struct xrep_bmap *rb, in xrep_bmap_discover_shared() argument
122 struct xrep_bmap *rb, in xrep_bmap_from_rmap() argument
446 error = xfarray_load(rb->bmap_records, rb->array_cur++, in xrep_bmap_get_records()
549 return xrep_ino_ensure_extent_count(rb->sc, rb->whichfork, in xrep_bmap_extents_load()
567 &rb->new_bmapbt.bload, rb->real_mappings); in xrep_bmap_btree_load()
594 error = xfs_btree_bload(bmap_cur, &rb->new_bmapbt.bload, rb); in xrep_bmap_btree_load()
634 error = xrep_newbt_init_inode(&rb->new_bmapbt, sc, rb->whichfork, in xrep_bmap_build_new_fork()
655 if (rb->real_mappings <= XFS_IFORK_MAXEXT(sc->ip, rb->whichfork)) { in xrep_bmap_build_new_fork()
815 if (!rb) in xrep_bmap()
817 rb->sc = sc; in xrep_bmap()
[all …]
/linux/kernel/bpf/
ringbuf.c
134 if (rb) { in bpf_ringbuf_area_alloc()
138 return rb; in bpf_ringbuf_area_alloc()
171 if (!rb) in bpf_ringbuf_alloc()
183 return rb; in bpf_ringbuf_alloc()
205 if (!rb_map->rb) { in ringbuf_map_alloc()
221 vunmap(rb); in bpf_ringbuf_free()
343 rb = container_of(map, struct bpf_ringbuf_map, map)->rb; in ringbuf_map_mem_usage()
438 hdr = (void *)rb->data + (prod_pos & rb->mask); in __bpf_ringbuf_reserve()
490 cons_pos = smp_load_acquire(&rb->consumer_pos) & rb->mask; in bpf_ringbuf_commit()
556 rb = container_of(map, struct bpf_ringbuf_map, map)->rb; in BPF_CALL_2()
[all …]
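
Line 438's `(prod_pos & rb->mask)` is the usual power-of-two ring indexing: positions grow monotonically and the mask folds them into the data area. A sketch of the idiom:

    /* size must be a power of two; mask == size - 1 (assumption). */
    static void *ringbuf_slot(void *data, unsigned long pos,
                              unsigned long mask)
    {
            return (char *)data + (pos & mask);
    }
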
/linux/drivers/hid/intel-ish-hid/ishtp/
client-buffers.c
29 if (!rb) { in ishtp_cl_alloc_rx_ring()
109 kfree(rb); in ishtp_cl_free_rx_ring()
119 kfree(rb); in ishtp_cl_free_rx_ring()
171 kfree(rb); in ishtp_io_rb_free()
187 if (!rb) in ishtp_io_rb_init()
191 rb->cl = cl; in ishtp_io_rb_init()
193 return rb; in ishtp_io_rb_init()
207 if (!rb) in ishtp_io_rb_alloc_buf()
235 if (!rb || !rb->cl) in ishtp_cl_io_rb_recycle()
291 if (rb) in ishtp_cl_rx_get_rb()
[all …]
client.c
50 if (rb->cl && ishtp_cl_cmp_id(cl, rb->cl)) { in ishtp_read_list_flush()
642 rb = NULL; in ishtp_cl_read_start()
650 rb->cl = cl; in ishtp_cl_read_start()
651 rb->buf_idx = 0; in ishtp_cl_read_start()
1023 cl = rb->cl; in recv_ishtp_cl_msg()
1030 if (rb->buffer.size == 0 || rb->buffer.data == NULL) { in recv_ishtp_cl_msg()
1046 if (rb->buffer.size < ishtp_hdr->length + rb->buf_idx) { in recv_ishtp_cl_msg()
1058 buffer = rb->buffer.data + rb->buf_idx; in recv_ishtp_cl_msg()
1143 cl = rb->cl; in recv_ishtp_cl_msg_dma()
1152 if (rb->buffer.size == 0 || rb->buffer.data == NULL) { in recv_ishtp_cl_msg_dma()
[all …]
/linux/lib/
rbtree_test.c
20 struct rb_node rb; member
166 for (count = 0; rb; rb = rb_parent(rb)) in black_path_count()
183 struct rb_node *rb; in check_postorder() local
185 for (rb = rb_first_postorder(&root.rb_root); rb; rb = rb_next_postorder(rb)) in check_postorder()
193 struct rb_node *rb; in check() local
197 for (rb = rb_first(&root.rb_root); rb; rb = rb_next(rb)) { in check()
198 struct test_node *node = rb_entry(rb, struct test_node, rb); in check()
201 (!rb_parent(rb) || is_red(rb_parent(rb)))); in check()
205 WARN_ON_ONCE((!rb->rb_left || !rb->rb_right) && in check()
223 for (rb = rb_first(&root.rb_root); rb; rb = rb_next(rb)) { in check_augmented()
[all …]
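
The hits above show the standard kernel rbtree idiom: embed a struct rb_node in the payload type and recover the container with rb_entry(). A minimal in-order walk in that style (plain rb_root here for simplicity; the test itself walks an rb_root_cached via &root.rb_root):

    #include <linux/rbtree.h>

    struct test_node {
            u32 key;
            struct rb_node rb; /* embedded node, per the "member" hit */
    };

    static void walk(struct rb_root *root)
    {
            struct rb_node *rb;

            for (rb = rb_first(root); rb; rb = rb_next(rb)) {
                    struct test_node *node = rb_entry(rb, struct test_node, rb);
                    /* use node->key ... */
            }
    }
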
/linux/kernel/printk/
printk_ringbuffer.c
1392 e->rb = rb; in prb_reserve_in_last()
1570 desc_update_last_finalized(rb); in desc_make_final()
1619 if (!desc_reserve(rb, &id)) { in prb_reserve()
1621 atomic_long_inc(&rb->fail); in prb_reserve()
1641 e->rb = rb; in prb_reserve()
1760 desc_make_final(e->rb, e->id); in prb_commit()
1782 desc_update_last_finalized(e->rb); in prb_final_commit()
2114 tail_seq = prb_first_seq(rb); in _prb_read_valid()
2313 rb->desc_ring.descs = descs; in prb_init()
2314 rb->desc_ring.infos = infos; in prb_init()
[all …]
printk_ringbuffer.h
106 struct printk_ringbuffer *rb; member
326 void prb_init(struct printk_ringbuffer *rb,
369 #define prb_for_each_record(from, rb, s, r) \ argument
387 #define prb_for_each_info(from, rb, s, i, lc) \ argument
390 bool prb_read_valid(struct printk_ringbuffer *rb, u64 seq,
395 u64 prb_first_seq(struct printk_ringbuffer *rb);
396 u64 prb_first_valid_seq(struct printk_ringbuffer *rb);
397 u64 prb_next_seq(struct printk_ringbuffer *rb);
398 u64 prb_next_reserve_seq(struct printk_ringbuffer *rb);
403 #define __ulseq_to_u64seq(rb, ulseq) (ulseq) argument
[all …]
/linux/drivers/gpu/drm/
drm_mm.c
175 rb = &hole_node->rb; in drm_mm_interval_tree_add_node()
177 parent = rb_entry(rb, struct drm_mm_node, rb); in drm_mm_interval_tree_add_node()
182 rb = rb_parent(rb); in drm_mm_interval_tree_add_node()
185 rb = &hole_node->rb; in drm_mm_interval_tree_add_node()
189 rb = NULL; in drm_mm_interval_tree_add_node()
195 rb = *link; in drm_mm_interval_tree_add_node()
196 parent = rb_entry(rb, struct drm_mm_node, rb); in drm_mm_interval_tree_add_node()
207 rb_link_node(&node->rb, rb, link); in drm_mm_interval_tree_add_node()
316 rb = rb->rb_right; in best_hole()
318 rb = rb->rb_left; in best_hole()
[all …]
drm_prime.c
110 rb = NULL; in drm_prime_add_buf_handle()
115 rb = *p; in drm_prime_add_buf_handle()
125 rb = NULL; in drm_prime_add_buf_handle()
130 rb = *p; in drm_prime_add_buf_handle()
149 while (rb) { in drm_prime_lookup_buf_by_handle()
156 rb = rb->rb_right; in drm_prime_lookup_buf_by_handle()
158 rb = rb->rb_left; in drm_prime_lookup_buf_by_handle()
179 rb = rb->rb_right; in drm_prime_lookup_buf_handle()
181 rb = rb->rb_left; in drm_prime_lookup_buf_handle()
208 rb = rb->rb_right; in drm_prime_remove_buf_handle()
[all …]
/linux/mm/
interval_tree.c
23 INTERVAL_TREE_DEFINE(struct vm_area_struct, shared.rb,
38 if (!prev->shared.rb.rb_right) { in vma_interval_tree_insert_after()
40 link = &prev->shared.rb.rb_right; in vma_interval_tree_insert_after()
42 parent = rb_entry(prev->shared.rb.rb_right, in vma_interval_tree_insert_after()
43 struct vm_area_struct, shared.rb); in vma_interval_tree_insert_after()
46 while (parent->shared.rb.rb_left) { in vma_interval_tree_insert_after()
47 parent = rb_entry(parent->shared.rb.rb_left, in vma_interval_tree_insert_after()
48 struct vm_area_struct, shared.rb); in vma_interval_tree_insert_after()
52 link = &parent->shared.rb.rb_left; in vma_interval_tree_insert_after()
56 rb_link_node(&node->shared.rb, &parent->shared.rb, link); in vma_interval_tree_insert_after()
[all …]
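
vma_interval_tree_insert_after() above places a node directly after a known predecessor: if prev has no right child the new node becomes it; otherwise it becomes the left child of the leftmost node in prev's right subtree. A generic, non-augmented sketch of that placement (the real code rebalances with the augmented callbacks):

    #include <linux/rbtree.h>

    static void insert_after(struct rb_node *node, struct rb_node *prev,
                             struct rb_root *root)
    {
            struct rb_node **link, *parent = prev;

            if (!prev->rb_right) {
                    link = &prev->rb_right;
            } else {
                    parent = prev->rb_right;
                    while (parent->rb_left)
                            parent = parent->rb_left;
                    link = &parent->rb_left;
            }

            rb_link_node(node, parent, link);
            rb_insert_color(node, root);
    }
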
/linux/drivers/misc/mchp_pci1xxxx/
mchp_pci1xxxx_otpe2p.c
99 void __iomem *rb = priv->reg_base; in is_eeprom_responsive() local
104 rb + MMAP_EEPROM_OFFSET(EEPROM_CMD_REG)); in is_eeprom_responsive()
106 rb + MMAP_EEPROM_OFFSET(EEPROM_CMD_REG)); in is_eeprom_responsive()
124 void __iomem *rb = priv->reg_base; in pci1xxxx_eeprom_read() local
148 rb + MMAP_EEPROM_OFFSET(EEPROM_CMD_REG)); in pci1xxxx_eeprom_read()
166 void __iomem *rb = priv->reg_base; in pci1xxxx_eeprom_write() local
194 rb + MMAP_EEPROM_OFFSET(EEPROM_CMD_REG)); in pci1xxxx_eeprom_write()
221 void __iomem *rb = priv->reg_base; in pci1xxxx_otp_read() local
251 rb + MMAP_OTP_OFFSET(OTP_STATUS_OFFSET)); in pci1xxxx_otp_read()
271 void __iomem *rb = priv->reg_base; in pci1xxxx_otp_write() local
[all …]
/linux/Documentation/translations/zh_CN/core-api/
rbtree.rst
274 if (node->rb.rb_left) {
276 rb_entry(node->rb.rb_left,
294 if (node->rb.rb_right) {
296 struct interval_tree_node, rb);
311 if (node->rb.rb_left) {
317 if (node->rb.rb_right) {
328 while (rb != stop) {
330 rb_entry(rb, struct interval_tree_node, rb);
335 rb = rb_parent(&node->rb);
377 link = &parent->rb.rb_left;
[all …]
/linux/arch/arm64/crypto/
sm3-neon-core.S
42 #define rb w4 macro
356 ldp ra, rb, [RSTATE, #0]
401 R1(ra, rb, rc, rd, re, rf, rg, rh, k_even, KL, 0, 0, IW, _, 0)
402 R1(rd, ra, rb, rc, rh, re, rf, rg, k_odd, _, 1, 1, IW, _, 0)
403 R1(rc, rd, ra, rb, rg, rh, re, rf, k_even, KL, 2, 2, IW, _, 0)
404 R1(rb, rc, rd, ra, rf, rg, rh, re, k_odd, _, 3, 3, IW, _, 0)
407 R1(ra, rb, rc, rd, re, rf, rg, rh, k_even, KL, 4, 0, IW, _, 0)
512 eor rb, rb, s1
518 stp ra, rb, [RSTATE, #0]
538 eor rb, rb, s1
[all …]
/linux/drivers/target/iscsi/
iscsi_target_configfs.c
44 ssize_t rb; in lio_target_np_driver_show() local
52 return rb; in lio_target_np_driver_show()
546 rb += sysfs_emit_at(page, rb, in lio_target_nacl_info_show()
595 rb += sysfs_emit_at(page, rb, in lio_target_nacl_info_show()
599 rb += sysfs_emit_at(page, rb, in lio_target_nacl_info_show()
603 rb += sysfs_emit_at(page, rb, in lio_target_nacl_info_show()
607 rb += sysfs_emit_at(page, rb, in lio_target_nacl_info_show()
611 rb += sysfs_emit_at(page, rb, in lio_target_nacl_info_show()
615 rb += sysfs_emit_at(page, rb, in lio_target_nacl_info_show()
619 rb += sysfs_emit_at(page, rb, in lio_target_nacl_info_show()
[all …]
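
In these hits `rb` is a running byte offset, the standard way to accumulate a sysfs/configfs show buffer with sysfs_emit_at(). A sketch of the pattern (field names are placeholders):

    /* page is the PAGE_SIZE buffer handed to a show op. */
    static ssize_t info_show(char *page)
    {
            ssize_t rb = 0;

            rb += sysfs_emit_at(page, rb, "field one: %d\n", 1);
            rb += sysfs_emit_at(page, rb, "field two: %d\n", 2);
            return rb; /* total bytes written */
    }
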
/linux/drivers/firmware/arm_scmi/
raw_mode.c
271 return rb; in scmi_raw_buffer_get()
280 rb->msg.len = rb->max_len; in scmi_raw_buffer_put()
309 return rb; in scmi_raw_buffer_dequeue_unlocked()
321 return rb; in scmi_raw_buffer_dequeue()
330 if (rb) in scmi_raw_buffer_queue_flush()
332 } while (rb); in scmi_raw_buffer_queue_flush()
717 return rb; in scmi_raw_message_dequeue()
755 memcpy(buf, rb->msg.buf, rb->msg.len); in scmi_raw_message_receive()
1045 if (!rb) in scmi_raw_queue_init()
1346 if (!rb) { in scmi_raw_message_report()
[all …]
/linux/fs/jffs2/
nodelist.h
230 struct rb_node rb; member
271 struct rb_node rb; member
347 #define frag_next(frag) rb_entry(rb_next(&(frag)->rb), struct jffs2_node_frag, rb)
348 #define frag_prev(frag) rb_entry(rb_prev(&(frag)->rb), struct jffs2_node_frag, rb)
350 #define frag_left(frag) rb_entry((frag)->rb.rb_left, struct jffs2_node_frag, rb)
351 #define frag_right(frag) rb_entry((frag)->rb.rb_right, struct jffs2_node_frag, rb)
354 #define tn_next(tn) rb_entry(rb_next(&(tn)->rb), struct jffs2_tmp_dnode_info, rb)
355 #define tn_prev(tn) rb_entry(rb_prev(&(tn)->rb), struct jffs2_tmp_dnode_info, rb)
356 #define tn_parent(tn) rb_entry(rb_parent(&(tn)->rb), struct jffs2_tmp_dnode_info, rb)
357 #define tn_left(tn) rb_entry((tn)->rb.rb_left, struct jffs2_tmp_dnode_info, rb)
[all …]
/linux/tools/testing/selftests/bpf/benchs/
run_bench_ringbufs.sh
10 for b in rb-libbpf rb-custom pb-libbpf pb-custom; do
15 for b in rb-libbpf rb-custom pb-libbpf pb-custom; do
20 for b in rb-libbpf rb-custom pb-libbpf pb-custom; do
43 for b in rb-libbpf rb-custom pb-libbpf pb-custom; do
/linux/drivers/gpu/drm/amd/display/dmub/inc/
dmub_cmd.h
4799 return (rb->wrpt == rb->rptr); in dmub_rb_empty()
4813 if (rb->wrpt >= rb->rptr) in dmub_rb_full()
4814 data_count = rb->wrpt - rb->rptr; in dmub_rb_full()
4816 data_count = rb->capacity - (rb->rptr - rb->wrpt); in dmub_rb_full()
4845 if (rb->wrpt >= rb->capacity) in dmub_rb_push_front()
4846 rb->wrpt %= rb->capacity; in dmub_rb_push_front()
4862 uint8_t *dst = (uint8_t *)(rb->base_address) + rb->wrpt; in dmub_rb_out_push_front()
4872 if (rb->wrpt >= rb->capacity) in dmub_rb_out_push_front()
4873 rb->wrpt %= rb->capacity; in dmub_rb_out_push_front()
4978 if (rb->rptr >= rb->capacity) in dmub_rb_pop_front()
[all …]
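
dmub_rb_empty()/dmub_rb_full() above track occupancy with separate read/write byte pointers that wrap at capacity. The occupancy math from those snippets, restated:

    #include <stdint.h>

    /* rptr/wrpt are byte offsets into a buffer of `capacity` bytes. */
    static int rb_is_empty(uint32_t wrpt, uint32_t rptr)
    {
            return wrpt == rptr;
    }

    static uint32_t rb_bytes_used(uint32_t wrpt, uint32_t rptr,
                                  uint32_t capacity)
    {
            return (wrpt >= rptr) ? wrpt - rptr
                                  : capacity - (rptr - wrpt);
    }
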
/linux/net/sunrpc/xprtrdma/
verbs.c
863 if (!rb) in rpcrdma_req_setup()
1238 rb = kmalloc(sizeof(*rb), XPRTRDMA_GFP_FLAGS); in rpcrdma_regbuf_alloc()
1239 if (!rb) in rpcrdma_regbuf_alloc()
1243 kfree(rb); in rpcrdma_regbuf_alloc()
1250 return rb; in rpcrdma_regbuf_alloc()
1293 rb->rg_iov.addr = ib_dma_map_single(device, rdmab_data(rb), in __rpcrdma_regbuf_dma_map()
1294 rdmab_length(rb), rb->rg_direction); in __rpcrdma_regbuf_dma_map()
1307 if (!rb) in rpcrdma_regbuf_dma_unmap()
1313 ib_dma_unmap_single(rb->rg_device, rdmab_addr(rb), rdmab_length(rb), in rpcrdma_regbuf_dma_unmap()
1321 if (rb) in rpcrdma_regbuf_free()
[all …]
