/linux/tools/perf/util/ |
bpf-prologue.h
    18  struct bpf_insn *new_prog, size_t *new_cnt,
    28  size_t *new_cnt,  in bpf__gen_prologue() argument
    31  if (!new_cnt)  in bpf__gen_prologue()
    33  *new_cnt = 0;  in bpf__gen_prologue()
|
/linux/tools/perf/arch/x86/util/ |
intel-bts.c
    283  int cnt = btsr->snapshot_ref_cnt, new_cnt = cnt * 2;  in intel_bts_alloc_snapshot_refs() local
    286  if (!new_cnt)  in intel_bts_alloc_snapshot_refs()
    287  new_cnt = 16;  in intel_bts_alloc_snapshot_refs()
    289  while (new_cnt <= idx)  in intel_bts_alloc_snapshot_refs()
    290  new_cnt *= 2;  in intel_bts_alloc_snapshot_refs()
    292  refs = calloc(new_cnt, sz);  in intel_bts_alloc_snapshot_refs()
    299  btsr->snapshot_ref_cnt = new_cnt;  in intel_bts_alloc_snapshot_refs()
|
intel-pt.c
    923  int cnt = ptr->snapshot_ref_cnt, new_cnt = cnt * 2;  in intel_pt_alloc_snapshot_refs() local
    926  if (!new_cnt)  in intel_pt_alloc_snapshot_refs()
    927  new_cnt = 16;  in intel_pt_alloc_snapshot_refs()
    929  while (new_cnt <= idx)  in intel_pt_alloc_snapshot_refs()
    930  new_cnt *= 2;  in intel_pt_alloc_snapshot_refs()
    932  refs = calloc(new_cnt, sz);  in intel_pt_alloc_snapshot_refs()
    939  ptr->snapshot_ref_cnt = new_cnt;  in intel_pt_alloc_snapshot_refs()
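
Both intel-bts.c and intel-pt.c grow their snapshot ref arrays with the same policy: double the current count (falling back to 16 when it is zero), keep doubling until the requested index fits, then calloc() the new array and carry the old entries over. Below is a minimal userspace sketch of that growth policy; the struct and function names (snapshot_refs, ensure_ref_slot) are made up for illustration and are not the kernel's.

    #include <stdlib.h>
    #include <string.h>

    struct snapshot_ref { void *ref_buf; size_t ref_offset; };  /* placeholder payload */

    struct snapshot_refs {
            struct snapshot_ref *refs;
            int cnt;
    };

    /* Grow s->refs so that index idx is valid: double the count (minimum 16),
     * allocate a zeroed array of the new size, and copy the old entries over. */
    static int ensure_ref_slot(struct snapshot_refs *s, int idx)
    {
            int cnt = s->cnt, new_cnt = cnt * 2;
            struct snapshot_ref *refs;

            if (idx < cnt)
                    return 0;            /* already large enough */

            if (!new_cnt)
                    new_cnt = 16;        /* first allocation */
            while (new_cnt <= idx)
                    new_cnt *= 2;        /* keep doubling until idx fits */

            refs = calloc(new_cnt, sizeof(*refs));
            if (!refs)
                    return -1;

            if (cnt)
                    memcpy(refs, s->refs, cnt * sizeof(*refs));
            free(s->refs);
            s->refs = refs;
            s->cnt = new_cnt;
            return 0;
    }

Doubling keeps the amortized cost of repeated growth linear, and the zeroed tail from calloc() means the newly added slots are immediately usable.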
|
/linux/tools/crypto/tcrypt/ |
tcrypt_speed_compare.py
    159  new_cnt = item[f"new_{key}"]
    161  new_sum += new_cnt
    162  differ = round((new_cnt - base_cnt)*100/base_cnt, 2)
|
/linux/tools/perf/arch/arm64/util/ |
arm-spe.c
    345  int cnt = ptr->wrapped_cnt, new_cnt, i;  in arm_spe_alloc_wrapped_array() local
    356  new_cnt = idx + 1;  in arm_spe_alloc_wrapped_array()
    361  wrapped = reallocarray(ptr->wrapped, new_cnt, sizeof(bool));  in arm_spe_alloc_wrapped_array()
    368  for (i = cnt; i < new_cnt; i++)  in arm_spe_alloc_wrapped_array()
    371  ptr->wrapped_cnt = new_cnt;  in arm_spe_alloc_wrapped_array()
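
arm-spe.c grows its wrapped[] flag array only to idx + 1 and uses reallocarray(), which preserves the old contents but leaves the tail uninitialized, so the new slots are cleared explicitly. A small sketch of the same pattern, assuming glibc's reallocarray(); grow_flag_array is a made-up name.

    #define _DEFAULT_SOURCE       /* for reallocarray() on glibc */
    #include <stdbool.h>
    #include <stdlib.h>

    /* Grow a bool array to hold index idx, clearing the newly added slots.
     * reallocarray() keeps the old contents but does not zero the tail,
     * hence the explicit loop from the old count to the new one. */
    static int grow_flag_array(bool **flags, int *cnt, int idx)
    {
            int old_cnt = *cnt, new_cnt, i;
            bool *tmp;

            if (idx < old_cnt)
                    return 0;

            new_cnt = idx + 1;
            tmp = reallocarray(*flags, new_cnt, sizeof(bool));
            if (!tmp)
                    return -1;

            for (i = old_cnt; i < new_cnt; i++)
                    tmp[i] = false;

            *flags = tmp;
            *cnt = new_cnt;
            return 0;
    }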
|
/linux/drivers/rtc/ |
sysfs.c
    318  size_t old_cnt = 0, add_cnt = 0, new_cnt;  in rtc_add_groups() local
    332  new_cnt = old_cnt + add_cnt + 1;  in rtc_add_groups()
    333  groups = devm_kcalloc(&rtc->dev, new_cnt, sizeof(*groups), GFP_KERNEL);  in rtc_add_groups()
|
/linux/net/vmw_vsock/ |
virtio_transport.c
    293  int new_cnt;  in virtio_transport_cancel_pkt() local
    295  new_cnt = atomic_sub_return(cnt, &vsock->queued_replies);  in virtio_transport_cancel_pkt()
    296  if (new_cnt + cnt >= virtqueue_get_vring_size(rx_vq) &&  in virtio_transport_cancel_pkt()
    297  new_cnt < virtqueue_get_vring_size(rx_vq))  in virtio_transport_cancel_pkt()
|
/linux/drivers/vhost/ |
vsock.c
    316  int new_cnt;  in vhost_transport_cancel_pkt() local
    318  new_cnt = atomic_sub_return(cnt, &vsock->queued_replies);  in vhost_transport_cancel_pkt()
    319  if (new_cnt + cnt >= tx_vq->num && new_cnt < tx_vq->num)  in vhost_transport_cancel_pkt()
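
Both vsock transports apply the same crossing test after atomic_sub_return(): the value before the subtraction is new_cnt + cnt, so the condition new_cnt + cnt >= limit && new_cnt < limit holds only for the caller whose subtraction moved the counter from at-or-above the limit to below it, and the follow-up work runs exactly once. A userspace sketch of that test with C11 atomics; sub_and_crossed_below is a hypothetical name.

    #include <stdatomic.h>
    #include <stdbool.h>

    /* Subtract cnt from *queued and report whether this particular call
     * crossed the threshold from >= limit down to < limit.  Because the
     * pre-subtraction value is new_cnt + cnt, only the crossing caller
     * sees both halves of the condition true. */
    static bool sub_and_crossed_below(atomic_int *queued, int cnt, int limit)
    {
            int new_cnt = atomic_fetch_sub(queued, cnt) - cnt;

            return new_cnt + cnt >= limit && new_cnt < limit;
    }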
|
/linux/tools/lib/bpf/ |
linker.c
    250  size_t new_cnt = linker->sec_cnt ? linker->sec_cnt + 1 : 2;  in add_dst_sec() local
    252  secs = libbpf_reallocarray(secs, new_cnt, sizeof(*secs));  in add_dst_sec()
    260  linker->sec_cnt = new_cnt;  in add_dst_sec()
    262  sec = &linker->secs[new_cnt - 1];  in add_dst_sec()
    263  sec->id = new_cnt - 1;  in add_dst_sec()
    519  size_t new_cnt = obj->sec_cnt ? obj->sec_cnt + 1 : 2;  in add_src_sec() local
    521  secs = libbpf_reallocarray(secs, new_cnt, sizeof(*secs));  in add_src_sec()
    526  memset(secs + obj->sec_cnt, 0, (new_cnt - obj->sec_cnt) * sizeof(*secs));  in add_src_sec()
    529  obj->sec_cnt = new_cnt;  in add_src_sec()
    531  sec = &obj->secs[new_cnt - 1];  in add_src_sec()
    [all …]
|
btf.c
    147  size_t new_cnt;  in libbpf_add_mem() local
    157  new_cnt = *cap_cnt;  in libbpf_add_mem()
    158  new_cnt += new_cnt / 4; /* expand by 25% */  in libbpf_add_mem()
    159  if (new_cnt < 16) /* but at least 16 elements */  in libbpf_add_mem()
    160  new_cnt = 16;  in libbpf_add_mem()
    161  if (new_cnt > max_cnt) /* but not exceeding a set limit */  in libbpf_add_mem()
    162  new_cnt = max_cnt;  in libbpf_add_mem()
    163  if (new_cnt < cur_cnt + add_cnt) /* also ensure we have enough memory */  in libbpf_add_mem()
    164  new_cnt = cur_cnt + add_cnt;  in libbpf_add_mem()
    166  new_data = libbpf_reallocarray(*data, new_cnt, elem_sz);  in libbpf_add_mem()
    [all …]
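
libbpf_add_mem() sizes a growing array geometrically: 25% over the current capacity, at least 16 elements, capped at max_cnt, and never smaller than what the caller needs right now. The sketch below isolates just that sizing logic, with the fast path and the reallocation around it omitted; next_capacity is a made-up name.

    #include <stddef.h>

    /* Compute the next capacity for a growing array: expand by 25%,
     * but at least 16 elements, never above max_cnt, and always large
     * enough to hold cur_cnt + add_cnt elements. */
    static size_t next_capacity(size_t cap_cnt, size_t cur_cnt,
                                size_t add_cnt, size_t max_cnt)
    {
            size_t new_cnt = cap_cnt;

            new_cnt += new_cnt / 4;            /* expand by 25% */
            if (new_cnt < 16)                  /* but at least 16 elements */
                    new_cnt = 16;
            if (new_cnt > max_cnt)             /* but not exceeding a set limit */
                    new_cnt = max_cnt;
            if (new_cnt < cur_cnt + add_cnt)   /* ensure room for the request */
                    new_cnt = cur_cnt + add_cnt;

            return new_cnt;
    }

The 25% factor trades a little extra memory for far fewer reallocations than growing one element at a time, while the max_cnt clamp keeps the array within its caller-defined limit.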
|
usdt.c
    856  size_t new_cnt = man->free_spec_cnt + usdt_link->spec_cnt;  in bpf_link_usdt_detach() local
    859  new_free_ids = libbpf_reallocarray(man->free_spec_ids, new_cnt,  in bpf_link_usdt_detach()
    870  if (new_free_ids || new_cnt == 0) {  in bpf_link_usdt_detach()
    874  man->free_spec_cnt = new_cnt;  in bpf_link_usdt_detach()
|
libbpf.c
    6286  int new_cnt = main_prog->nr_reloc + subprog->nr_reloc;  in append_subprog_relos() local
    6292  relos = libbpf_reallocarray(main_prog->reloc_desc, new_cnt, sizeof(*relos));  in append_subprog_relos()
    6297  if (!relos && new_cnt)  in append_subprog_relos()
    6303  for (i = main_prog->nr_reloc; i < new_cnt; i++)  in append_subprog_relos()
    6309  main_prog->nr_reloc = new_cnt;  in append_subprog_relos()
    6318  size_t new_cnt;  in bpf_object__append_subprog_code() local
    6323  new_cnt = main_prog->insns_cnt + subprog->insns_cnt;  in bpf_object__append_subprog_code()
    6324  insns = libbpf_reallocarray(main_prog->insns, new_cnt, sizeof(*insns));  in bpf_object__append_subprog_code()
    6330  main_prog->insns_cnt = new_cnt;  in bpf_object__append_subprog_code()
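
append_subprog_relos() and bpf_object__append_subprog_code() both append one array to another the same way: new_cnt is the old count plus the appended count, the destination is grown with reallocarray(), the new elements are copied behind the old ones, and new_cnt is stored back. A generic userspace sketch of that append step; append_array is a hypothetical helper and it skips the per-element fixups the real code performs after copying.

    #define _DEFAULT_SOURCE       /* for reallocarray() on glibc */
    #include <stdlib.h>
    #include <string.h>

    /* Append add_cnt elements of elem_sz bytes from src to the array at *arr
     * (currently *cnt elements), updating *arr and *cnt on success. */
    static int append_array(void **arr, size_t *cnt, const void *src,
                            size_t add_cnt, size_t elem_sz)
    {
            size_t new_cnt = *cnt + add_cnt;
            char *tmp;

            if (!add_cnt)
                    return 0;                      /* nothing to append */

            tmp = reallocarray(*arr, new_cnt, elem_sz);
            if (!tmp)
                    return -1;

            memcpy(tmp + *cnt * elem_sz, src, add_cnt * elem_sz);
            *arr = tmp;
            *cnt = new_cnt;
            return 0;
    }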
|
/linux/drivers/perf/hisilicon/ |
hisi_pcie_pmu.c
    449  u64 new_cnt, prev_cnt, delta;  in hisi_pcie_pmu_event_update() local
    453  new_cnt = hisi_pcie_pmu_read_counter(event);  in hisi_pcie_pmu_event_update()
    455  new_cnt) != prev_cnt);  in hisi_pcie_pmu_event_update()
    457  delta = (new_cnt - prev_cnt) & HISI_PCIE_MAX_PERIOD;  in hisi_pcie_pmu_event_update()
|
hns3_pmu.c
    1281  u64 new_cnt, prev_cnt, delta;  in hns3_pmu_read() local
    1285  new_cnt = hns3_pmu_read_counter(event);  in hns3_pmu_read()
    1286  } while (local64_cmpxchg(&hwc->prev_count, prev_cnt, new_cnt) !=  in hns3_pmu_read()
    1289  delta = new_cnt - prev_cnt;  in hns3_pmu_read()
|
/linux/drivers/gpu/drm/msm/disp/dpu1/ |
dpu_encoder_phys_cmd.c
    87  int new_cnt;  in dpu_encoder_phys_cmd_pp_tx_done_irq() local
    98  new_cnt = atomic_add_unless(&phys_enc->pending_kickoff_cnt, -1, 0);  in dpu_encoder_phys_cmd_pp_tx_done_irq()
    103  new_cnt, event);  in dpu_encoder_phys_cmd_pp_tx_done_irq()
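
The pp_tx_done IRQ handler decrements pending_kickoff_cnt with atomic_add_unless(..., -1, 0), so the counter is never pushed below zero even if an unexpected extra interrupt arrives. A userspace equivalent built on a C11 compare-and-swap loop; dec_unless_zero is a made-up name.

    #include <stdatomic.h>
    #include <stdbool.h>

    /* Decrement *cnt unless it is already 0; return true if it was changed.
     * Mirrors the "never go below zero" behaviour of atomic_add_unless(v, -1, 0). */
    static bool dec_unless_zero(atomic_int *cnt)
    {
            int cur = atomic_load(cnt);

            while (cur != 0) {
                    if (atomic_compare_exchange_weak(cnt, &cur, cur - 1))
                            return true;
                    /* cur was reloaded by the failed CAS; retry */
            }
            return false;
    }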
|
/linux/drivers/perf/ |
cxl_pmu.c
    685  u64 new_cnt, prev_cnt, delta;  in __cxl_pmu_read() local
    689  new_cnt = cxl_pmu_read_counter(event);  in __cxl_pmu_read()
    690  } while (local64_cmpxchg(&hwc->prev_count, prev_cnt, new_cnt) != prev_cnt);  in __cxl_pmu_read()
    696  delta = (new_cnt - prev_cnt) & GENMASK_ULL(info->counter_width - 1, 0);  in __cxl_pmu_read()
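
hisi_pcie_pmu.c, hns3_pmu.c and cxl_pmu.c all read a free-running hardware counter with the same loop: snapshot prev_count, read the counter, retry via cmpxchg until no other reader raced in between, then accumulate the delta masked to the counter width so wraparound is handled. A userspace sketch of that loop with C11 atomics; read_hw_counter() stands in for the device access and counter_width for the hardware counter width.

    #include <stdatomic.h>
    #include <stdint.h>

    extern uint64_t read_hw_counter(void);   /* stand-in for the MMIO read */

    /* Return how much a free-running, possibly narrower-than-64-bit counter
     * advanced since the last call, updating *prev_count atomically so that
     * concurrent readers do not double-count the same interval. */
    static uint64_t counter_delta(_Atomic uint64_t *prev_count, unsigned counter_width)
    {
            uint64_t mask = (counter_width < 64) ?
                            (UINT64_C(1) << counter_width) - 1 : UINT64_MAX;
            uint64_t prev_cnt, new_cnt;

            do {
                    prev_cnt = atomic_load(prev_count);
                    new_cnt = read_hw_counter();
            } while (!atomic_compare_exchange_strong(prev_count, &prev_cnt, new_cnt));

            /* Masking the difference handles wraparound of the hardware counter. */
            return (new_cnt - prev_cnt) & mask;
    }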
|
/linux/drivers/gpu/drm/amd/amdgpu/ |
amdgpu_ras.h
    755  unsigned long *new_cnt);
|
amdgpu_ras.c
    2710  unsigned long *new_cnt)  in amdgpu_ras_save_bad_pages() argument
    2718  if (new_cnt)  in amdgpu_ras_save_bad_pages()
    2719  *new_cnt = 0;  in amdgpu_ras_save_bad_pages()
    2730  if (new_cnt)  in amdgpu_ras_save_bad_pages()
    2731  *new_cnt = save_count / adev->umc.retire_unit;  in amdgpu_ras_save_bad_pages()
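
amdgpu_ras_save_bad_pages() treats new_cnt as an optional output: it is zeroed up front and only dereferenced when the caller passed a non-NULL pointer, the same convention bpf__gen_prologue() uses in tools/perf above. A tiny sketch of that convention; save_items and its body are invented for illustration.

    #include <stddef.h>

    /* Hypothetical worker that optionally reports how many items it saved.
     * Callers that do not care simply pass NULL for new_cnt. */
    static int save_items(size_t todo, unsigned long *new_cnt)
    {
            unsigned long saved = 0;

            if (new_cnt)
                    *new_cnt = 0;      /* report 0 on early-exit paths too */

            for (size_t i = 0; i < todo; i++)
                    saved++;           /* real work elided */

            if (new_cnt)
                    *new_cnt = saved;
            return 0;
    }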
|
/linux/drivers/infiniband/hw/ocrdma/ |
ocrdma_verbs.c
    1014  int ocrdma_resize_cq(struct ib_cq *ibcq, int new_cnt,  in ocrdma_resize_cq() argument
    1020  if (new_cnt < 1 || new_cnt > cq->max_hw_cqe) {  in ocrdma_resize_cq()
    1024  ibcq->cqe = new_cnt;  in ocrdma_resize_cq()
|
/linux/tools/perf/ |
builtin-inject.c
    1290  static int synthesize_id_index(struct perf_inject *inject, size_t new_cnt)  in synthesize_id_index() argument
    1295  size_t from = evlist->core.nr_entries - new_cnt;  in synthesize_id_index()
|
/linux/kernel/bpf/ |
verifier.c
    21655  int i, ret, new_cnt;  in do_check_subprogs() local
    21665  new_cnt = 0;  in do_check_subprogs()
    21689  new_cnt++;  in do_check_subprogs()
    21695  if (new_cnt)  in do_check_subprogs()
|