/tools/testing/cxl/test/mock.c
    52 if (ops) in __wrap_is_acpi_device_node()
    70 if (ops) in __wrap_acpi_table_parse_cedt()
    90 if (ops) in __wrap_acpi_evaluate_integer()
    142 if (ops && ops->is_mock_port(port->uport_dev)) in __wrap_devm_cxl_setup_hdm()
    157 if (ops && ops->is_mock_port(port->uport_dev)) in __wrap_devm_cxl_add_passthrough_decoder()
    174 if (ops && ops->is_mock_port(port->uport_dev)) in __wrap_devm_cxl_enumerate_decoders()
    204 if (ops && ops->is_mock_dev(cxlds->dev)) in __wrap_cxl_await_media_ready()
    221 if (ops && ops->is_mock_dev(cxlds->dev)) in __wrap_cxl_hdm_decode_init()
    237 if (ops && ops->is_mock_dev(cxlds->dev)) in __wrap_cxl_dvsec_rr_decode()
    256 if (ops && ops->is_mock_port(dport_dev)) { in __wrap_devm_cxl_add_rch_dport()
    [all …]
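Every hit above has the same shape: a __wrap_*() override asks the registered cxl mock ops whether the device or port is a mocked one before letting the normal path run. A compilable sketch of that guard follows; the mock_ops struct, get_mock_ops() and await_media_ready() are simplified stand-ins, and the real harness routes calls through ld --wrap rather than a plain helper.

/* Illustrative only -- not the tools/testing/cxl code. */
#include <stdbool.h>
#include <stddef.h>
#include <stdio.h>

struct mock_ops {
        bool (*is_mock_dev)(const void *dev);   /* hypothetical hook */
};

static const struct mock_ops *registered_mock_ops;      /* NULL unless a test registered one */

static const struct mock_ops *get_mock_ops(void)
{
        return registered_mock_ops;
}

static int real_await_media_ready(const void *dev)
{
        (void)dev;
        printf("talking to real hardware\n");   /* placeholder for the real path */
        return 0;
}

int await_media_ready(const void *dev)
{
        const struct mock_ops *ops = get_mock_ops();

        if (ops && ops->is_mock_dev(dev))
                return 0;                       /* mocked device: skip the hardware path */
        return real_await_media_ready(dev);
}

static bool fake_is_mock_dev(const void *dev) { (void)dev; return true; }
static const struct mock_ops fake_ops = { .is_mock_dev = fake_is_mock_dev };

int main(void)
{
        int dev;

        await_media_ready(&dev);                /* no mock registered: real path */
        registered_mock_ops = &fake_ops;
        await_media_ready(&dev);                /* now intercepted by the mock */
        return 0;
}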
|
/tools/perf/arch/x86/annotate/instructions.c
    11 { .name = "adc", .ops = &mov_ops, },
    12 { .name = "add", .ops = &mov_ops, },
    13 { .name = "addsd", .ops = &mov_ops, },
    14 { .name = "and", .ops = &mov_ops, },
    17 { .name = "bsr", .ops = &mov_ops, },
    18 { .name = "bt", .ops = &mov_ops, },
    19 { .name = "btr", .ops = &mov_ops, },
    20 { .name = "bts", .ops = &mov_ops, },
    25 { .name = "cmp", .ops = &mov_ops, },
    28 { .name = "cs", .ops = &mov_ops, },
    [all …]
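This file is essentially a table associating x86 mnemonics with a shared struct ins_ops (all of the arithmetic/compare entries shown reuse mov_ops). A toy, self-contained version of that association-table idea, with simplified struct layouts rather than perf's real ones:

#include <stddef.h>
#include <stdio.h>
#include <string.h>

struct ins_ops { const char *kind; };

static struct ins_ops mov_like_ops  = { "mov-like" };
static struct ins_ops call_like_ops = { "call-like" };

struct ins {
        const char *name;
        struct ins_ops *ops;
};

/* mnemonic -> shared ops, in the spirit of the x86 annotate table */
static struct ins table[] = {
        { .name = "add",  .ops = &mov_like_ops  },
        { .name = "call", .ops = &call_like_ops },
        { .name = "cmp",  .ops = &mov_like_ops  },
};

static struct ins_ops *ins_find(const char *name)
{
        for (size_t i = 0; i < sizeof(table) / sizeof(table[0]); i++)
                if (!strcmp(table[i].name, name))
                        return table[i].ops;
        return NULL;    /* unknown mnemonic: caller treats it as "no special ops" */
}

int main(void)
{
        struct ins_ops *ops = ins_find("cmp");

        printf("cmp -> %s\n", ops ? ops->kind : "none");
        return 0;
}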
|
/tools/perf/arch/loongarch/annotate/instructions.c
    17 c = strchr(ops->raw, '#'); in loongarch_call__parse()
    38 if (ops->target.name == NULL) in loongarch_call__parse()
    66 ops->jump.raw_comment = strchr(ops->raw, arch->objdump.comment_char); in loongarch_jump__parse()
    67 ops->jump.raw_func_start = strchr(ops->raw, '<'); in loongarch_jump__parse()
    69 if (ops->jump.raw_func_start && c > ops->jump.raw_func_start) in loongarch_jump__parse()
    75 ops->target.addr = strtoull(ops->raw, NULL, 16); in loongarch_jump__parse()
    87 if (!ops->target.outside) { in loongarch_jump__parse()
    105 struct ins_ops *ops = NULL; in loongarch__associate_ins_ops() local
    108 ops = &loongarch_call_ops; in loongarch__associate_ins_ops()
    110 ops = &ret_ops; in loongarch__associate_ins_ops()
    [all …]
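The loongarch parse hits locate the objdump comment character and the '<symbol>' marker with strchr() and read the branch target with strtoull(..., 16). A reduced, runnable sketch of that parsing step, using invented helper and field names:

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

/* Pull the numeric target and the "<symbol+offset>" name out of one line of
 * objdump output, ignoring anything past the comment character. */
static int jump_parse(const char *raw, char comment_char,
                      unsigned long long *addr, char *name, size_t name_len)
{
        const char *comment = strchr(raw, comment_char);
        const char *func = strchr(raw, '<');
        const char *end;

        *addr = strtoull(raw, NULL, 16);        /* target address comes first, in hex */
        name[0] = '\0';

        /* only use "<symbol>" if it is not part of the trailing comment */
        if (!func || (comment && func > comment))
                return 0;
        end = strchr(func, '>');
        if (!end)
                return -1;
        snprintf(name, name_len, "%.*s", (int)(end - func - 1), func + 1);
        return 0;
}

int main(void)
{
        unsigned long long addr;
        char name[64];

        if (jump_parse("120000a30 <foo+0x18>  # taken 12 times", '#',
                       &addr, name, sizeof(name)) == 0)
                printf("target 0x%llx (%s)\n", addr, name[0] ? name : "?");
        return 0;
}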
|
/tools/lib/thermal/events.c
    27 struct thermal_events_ops *ops = &thp->th->ops->events; in handle_thermal_event() local
    62 return ops->trip_add(nla_get_u32(attrs[THERMAL_GENL_ATTR_TZ_ID]), in handle_thermal_event()
    128 enabled_ops[THERMAL_GENL_EVENT_TZ_CREATE] = !!ops->tz_create; in thermal_events_ops_init()
    129 enabled_ops[THERMAL_GENL_EVENT_TZ_DELETE] = !!ops->tz_delete; in thermal_events_ops_init()
    130 enabled_ops[THERMAL_GENL_EVENT_TZ_DISABLE] = !!ops->tz_disable; in thermal_events_ops_init()
    131 enabled_ops[THERMAL_GENL_EVENT_TZ_ENABLE] = !!ops->tz_enable; in thermal_events_ops_init()
    132 enabled_ops[THERMAL_GENL_EVENT_TZ_TRIP_UP] = !!ops->trip_high; in thermal_events_ops_init()
    133 enabled_ops[THERMAL_GENL_EVENT_TZ_TRIP_DOWN] = !!ops->trip_low; in thermal_events_ops_init()
    135 enabled_ops[THERMAL_GENL_EVENT_TZ_TRIP_ADD] = !!ops->trip_add; in thermal_events_ops_init()
    137 enabled_ops[THERMAL_GENL_EVENT_CDEV_ADD] = !!ops->cdev_add; in thermal_events_ops_init()
    [all …]
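thermal_events_ops_init() converts "which callbacks did the caller fill in" into a per-event enabled flag with the !!ops->callback pattern, so only subscribed events are handled. A minimal illustration with a cut-down event list and ops struct (not the libthermal API):

#include <stdbool.h>
#include <stddef.h>
#include <stdio.h>

enum demo_event { EV_TZ_CREATE, EV_TZ_DELETE, EV_TRIP_HIGH, EV_MAX };

struct demo_events_ops {
        int (*tz_create)(int tz_id);
        int (*tz_delete)(int tz_id);
        int (*trip_high)(int tz_id, int trip_id, int temp);
};

static void events_init(const struct demo_events_ops *ops, bool enabled[EV_MAX])
{
        /* !! turns "callback pointer was provided" into a plain boolean flag */
        enabled[EV_TZ_CREATE] = !!ops->tz_create;
        enabled[EV_TZ_DELETE] = !!ops->tz_delete;
        enabled[EV_TRIP_HIGH] = !!ops->trip_high;
}

static int on_tz_create(int tz_id)
{
        printf("thermal zone %d created\n", tz_id);
        return 0;
}

int main(void)
{
        struct demo_events_ops ops = { .tz_create = on_tz_create };
        bool enabled[EV_MAX];

        events_init(&ops, enabled);
        for (int i = 0; i < EV_MAX; i++)
                printf("event %d subscribed: %s\n", i, enabled[i] ? "yes" : "no");
        return 0;
}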
|
/tools/perf/arch/s390/annotate/instructions.c
    13 tok = strchr(ops->raw, ','); in s390_call__parse()
    37 if (ops->target.name == NULL) in s390_call__parse()
    64 ops->source.raw = strdup(ops->raw); in s390_mov__parse()
    95 zfree(&ops->target.raw); in s390_mov__parse()
    97 zfree(&ops->source.raw); in s390_mov__parse()
    115 ops = &jump_ops; in s390__associate_ins_ops()
    120 ops = &s390_call_ops; in s390__associate_ins_ops()
    122 ops = &ret_ops; in s390__associate_ins_ops()
    130 ops = &s390_mov_ops; in s390__associate_ins_ops()
    132 if (ops) in s390__associate_ins_ops()
    [all …]
|
/tools/perf/arch/arm64/annotate/instructions.c
    23 ops->source.raw = strdup(ops->raw); in arm64_mov__parse()
    26 if (ops->source.raw == NULL) in arm64_mov__parse()
    31 if (ops->target.raw == NULL) in arm64_mov__parse()
    56 zfree(&ops->target.raw); in arm64_mov__parse()
    58 zfree(&ops->source.raw); in arm64_mov__parse()
    73 struct ins_ops *ops; in arm64__associate_instruction_ops() local
    77 ops = &jump_ops; in arm64__associate_instruction_ops()
    79 ops = &call_ops; in arm64__associate_instruction_ops()
    81 ops = &ret_ops; in arm64__associate_instruction_ops()
    83 ops = &arm64_mov_ops; in arm64__associate_instruction_ops()
    [all …]
|
/tools/testing/selftests/mm/khugepaged.c
    650 ops, true); in collapse_full()
    671 1, ops, true); in collapse_single_pte_entry()
    687 if (is_tmpfs(ops)) { in collapse_max_ptes_none()
    777 1, ops, true); in collapse_max_ptes_swap()
    788 p = alloc_hpage(ops); in collapse_single_pte_entry_compound()
    790 if (is_tmpfs(ops)) { in collapse_single_pte_entry_compound()
    816 p = alloc_hpage(ops); in collapse_full_of_compound()
    878 ops, true); in collapse_compound_extreme()
    935 p = alloc_hpage(ops); in collapse_fork_compound()
    985 p = alloc_hpage(ops); in collapse_max_ptes_shared()
    [all …]
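The khugepaged hits show each collapse test taking an ops argument and querying it (alloc_hpage(ops), is_tmpfs(ops)), i.e. one test body parameterized over a backing-store ops table. A small generic illustration of that structure, with invented backend names rather than the selftest's own helpers:

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

struct backend_ops {
        const char *name;
        void *(*alloc)(size_t size);
        void (*release)(void *p, size_t size);
};

static void *heap_alloc(size_t size) { return calloc(1, size); }
static void heap_release(void *p, size_t size) { (void)size; free(p); }

static const struct backend_ops heap_ops = {
        .name = "heap", .alloc = heap_alloc, .release = heap_release,
};

/* one test body, reused for every backend */
static int test_fill(const struct backend_ops *ops, size_t size)
{
        unsigned char *p = ops->alloc(size);

        if (!p)
                return -1;
        memset(p, 0xaa, size);
        printf("[%s] filled %zu bytes\n", ops->name, size);
        ops->release(p, size);
        return 0;
}

int main(void)
{
        const struct backend_ops *backends[] = { &heap_ops };

        for (size_t i = 0; i < sizeof(backends) / sizeof(backends[0]); i++)
                if (test_fill(backends[i], 4096))
                        return 1;
        return 0;
}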
|
/tools/perf/util/unwind-libunwind.c
    19 struct unwind_libunwind_ops *ops = local_unwind_libunwind_ops; in unwind__prepare_access() local
    47 ops = x86_32_unwind_libunwind_ops; in unwind__prepare_access()
    50 ops = arm64_unwind_libunwind_ops; in unwind__prepare_access()
    53 if (!ops) { in unwind__prepare_access()
    58 maps__set_unwind_libunwind_ops(maps, ops); in unwind__prepare_access()
    70 if (ops) in unwind__flush_access()
    71 ops->flush_access(maps); in unwind__flush_access()
    78 if (ops) in unwind__finish_access()
    79 ops->finish_access(maps); in unwind__finish_access()
    89 if (ops) in unwind__get_entries()
    [all …]
|
/tools/perf/util/disasm.c
    100 ins->ops = ops; in arch__associate_ins_ops()
    525 ops->locked.ops = zalloc(sizeof(*ops->locked.ops)); in lock__parse()
    526 if (ops->locked.ops == NULL) in lock__parse()
    529 if (disasm_line__parse(ops->raw, &ops->locked.ins.name, &ops->locked.ops->raw) < 0) in lock__parse()
    532 ops->locked.ins.ops = ins__find(arch, ops->locked.ins.name, 0); in lock__parse()
    538 ops->locked.ins.ops->parse(arch, ops->locked.ops, ms, NULL) < 0) in lock__parse()
    544 zfree(&ops->locked.ops); in lock__parse()
    565 if (ins->ops && ins->ops->free) in lock__delete()
    566 ins->ops->free(ops->locked.ops); in lock__delete()
    570 zfree(&ops->locked.ops); in lock__delete()
    [all …]
|
/tools/perf/util/disasm.h
    55 struct ins_ops *ops; member
    81 struct ins_operands *ops; member
    91 void (*free)(struct ins_operands *ops);
    92 int (*parse)(struct arch *arch, struct ins_operands *ops, struct map_symbol *ms,
    95 struct ins_operands *ops, int max_ins_name);
    114 struct ins_operands *ops, int max_ins_name);
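disasm.h declares the indirection used by the annotate code: an instruction carries a struct ins_ops pointer whose callbacks (free, parse, a printer) take struct ins_operands. The sketch below shows one common way such an interface is consumed, including falling back to a default printer when a hook is absent; the member layout and the fallback are illustrative, not perf's exact code:

#include <stdio.h>
#include <string.h>

struct ins_operands { const char *raw; };

struct ins_ops {
        void (*free)(struct ins_operands *ops);                 /* optional */
        int  (*parse)(struct ins_operands *ops);                /* optional */
        int  (*scnprintf)(const char *name, char *bf, size_t size,
                          struct ins_operands *ops);            /* optional */
};

struct ins {
        const char *name;
        const struct ins_ops *ops;
};

/* generic fallback used when an ins_ops does not provide a printer */
static int default_scnprintf(const char *name, char *bf, size_t size,
                             struct ins_operands *ops)
{
        return snprintf(bf, size, "%-6s %s", name, ops->raw);
}

static int ins_scnprintf(const struct ins *ins, char *bf, size_t size,
                         struct ins_operands *ops)
{
        if (ins->ops && ins->ops->scnprintf)
                return ins->ops->scnprintf(ins->name, bf, size, ops);
        return default_scnprintf(ins->name, bf, size, ops);
}

int main(void)
{
        struct ins mov = { .name = "mov", .ops = NULL };
        struct ins_operands operands = { .raw = "%rdi,%rax" };
        char buf[64];

        ins_scnprintf(&mov, buf, sizeof(buf), &operands);
        puts(buf);
        return 0;
}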
|
/tools/perf/util/dwarf-aux.c
    1223 ops++; in check_allowed_ops()
    1233 switch (ops->atom) { in check_allowed_ops()
    1242 ops++; in check_allowed_ops()
    1433 Dwarf_Op *ops; in __die_find_var_reg_cb() local
    1473 if (ops->atom == DW_OP_regx && ops->number == data->reg && in __die_find_var_reg_cb()
    1480 if (ops->atom == DW_OP_bregx && data->reg == ops->number && in __die_find_var_reg_cb()
    1528 Dwarf_Op *ops; in __die_find_var_addr_cb() local
    1580 Dwarf_Op *ops; in __die_collect_vars_cb() local
    1643 Dwarf_Op *ops; in __die_collect_global_vars_cb() local
    1671 vt->addr = ops->number; in __die_collect_global_vars_cb()
    [all …]
|
/tools/perf/util/trace-event-scripting.c
    31 struct scripting_ops *ops; member
    38 struct scripting_ops *ops) in script_spec__new() argument
    44 s->ops = ops; in script_spec__new()
    65 static int script_spec_register(const char *spec, struct scripting_ops *ops) in script_spec_register() argument
    73 s = script_spec__new(spec, ops); in script_spec_register()
    88 return s->ops; in script_spec__lookup()
    91 int script_spec__for_each(int (*cb)(struct scripting_ops *ops, const char *spec)) in script_spec__for_each() argument
    97 ret = cb(s->ops, s->spec); in script_spec__for_each()
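trace-event-scripting.c keeps a registry of (spec, scripting_ops) pairs: script_spec_register() adds one, script_spec__lookup() returns the ops for a spec, and script_spec__for_each() walks the registry with a caller-supplied callback. A self-contained sketch of that registry shape, using a fixed-size array instead of perf's list:

#include <stdio.h>
#include <string.h>

struct scripting_ops { const char *name; };

struct script_spec {
        const char *spec;
        struct scripting_ops *ops;
};

static struct script_spec specs[8];
static int nr_specs;

static int script_spec_register(const char *spec, struct scripting_ops *ops)
{
        if (nr_specs >= 8)
                return -1;
        specs[nr_specs].spec = spec;
        specs[nr_specs].ops = ops;
        nr_specs++;
        return 0;
}

static struct scripting_ops *script_spec_lookup(const char *spec)
{
        for (int i = 0; i < nr_specs; i++)
                if (!strcmp(specs[i].spec, spec))
                        return specs[i].ops;
        return NULL;
}

static int script_spec_for_each(int (*cb)(struct scripting_ops *ops, const char *spec))
{
        for (int i = 0; i < nr_specs; i++) {
                int ret = cb(specs[i].ops, specs[i].spec);

                if (ret)
                        return ret;     /* stop on the first non-zero return */
        }
        return 0;
}

static int print_spec(struct scripting_ops *ops, const char *spec)
{
        printf("%s -> %s\n", spec, ops->name);
        return 0;
}

int main(void)
{
        static struct scripting_ops python_ops = { "python" };

        script_spec_register("py", &python_ops);
        script_spec_register("python", &python_ops);
        script_spec_for_each(print_spec);
        printf("lookup(py): %s\n", script_spec_lookup("py")->name);
        return 0;
}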
|
/tools/usb/usbip/libsrc/usbip_host_common.h
    43 struct usbip_host_driver_ops ops; member
    57 if (!hdriver->ops.open) in usbip_driver_open()
    59 return hdriver->ops.open(hdriver); in usbip_driver_open()
    64 if (!hdriver->ops.close) in usbip_driver_close()
    66 hdriver->ops.close(hdriver); in usbip_driver_close()
    71 if (!hdriver->ops.refresh_device_list) in usbip_refresh_device_list()
    73 return hdriver->ops.refresh_device_list(hdriver); in usbip_refresh_device_list()
    79 if (!hdriver->ops.get_device) in usbip_get_device()
    81 return hdriver->ops.get_device(hdriver, num); in usbip_get_device()
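usbip_host_common.h wraps each driver callback in an inline helper that checks the function pointer before calling it, so a driver can leave optional ops unimplemented. The same guard-then-call shape reduced to a standalone example (driver and ops names here are invented):

#include <stdio.h>

struct host_driver;

struct host_driver_ops {
        int  (*open)(struct host_driver *hd);           /* may be NULL */
        void (*close)(struct host_driver *hd);          /* may be NULL */
};

struct host_driver {
        const char *name;
        struct host_driver_ops ops;
};

static int driver_open(struct host_driver *hd)
{
        if (!hd->ops.open)
                return -1;              /* callback not provided */
        return hd->ops.open(hd);
}

static void driver_close(struct host_driver *hd)
{
        if (!hd->ops.close)
                return;
        hd->ops.close(hd);
}

static int demo_open(struct host_driver *hd)
{
        printf("%s opened\n", hd->name);
        return 0;
}

int main(void)
{
        struct host_driver hd = { .name = "demo", .ops = { .open = demo_open } };

        driver_open(&hd);
        driver_close(&hd);      /* no-op: close was left unset */
        return 0;
}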
|
/tools/perf/arch/csky/annotate/instructions.c
    9 struct ins_ops *ops = NULL; in csky__associate_ins_ops() local
    24 ops = &jump_ops; in csky__associate_ins_ops()
    30 ops = &call_ops; in csky__associate_ins_ops()
    34 ops = &ret_ops; in csky__associate_ins_ops()
    36 if (ops) in csky__associate_ins_ops()
    37 arch__associate_ins_ops(arch, name, ops); in csky__associate_ins_ops()
    38 return ops; in csky__associate_ins_ops()
|
/tools/perf/arch/riscv64/annotate/instructions.c
    6 struct ins_ops *ops = NULL; in riscv64__associate_ins_ops() local
    11 ops = &call_ops; in riscv64__associate_ins_ops()
    13 ops = &ret_ops; in riscv64__associate_ins_ops()
    15 ops = &jump_ops; in riscv64__associate_ins_ops()
    19 arch__associate_ins_ops(arch, name, ops); in riscv64__associate_ins_ops()
    21 return ops; in riscv64__associate_ins_ops()
|
/tools/perf/arch/sparc/annotate/instructions.c
    122 struct ins_ops *ops = NULL; in sparc__associate_instruction_ops() local
    127 ops = &call_ops; in sparc__associate_instruction_ops()
    131 ops = &ret_ops; in sparc__associate_instruction_ops()
    133 ops = &mov_ops; in sparc__associate_instruction_ops()
    144 ops = &jump_ops; in sparc__associate_instruction_ops()
    146 ops = &jump_ops; in sparc__associate_instruction_ops()
    150 ops = &jump_ops; in sparc__associate_instruction_ops()
    154 if (ops) in sparc__associate_instruction_ops()
    155 arch__associate_ins_ops(arch, name, ops); in sparc__associate_instruction_ops()
    157 return ops; in sparc__associate_instruction_ops()
|
/tools/perf/arch/mips/annotate/instructions.c
    6 struct ins_ops *ops = NULL; in mips__associate_ins_ops() local
    23 ops = &call_ops; in mips__associate_ins_ops()
    25 ops = &ret_ops; in mips__associate_ins_ops()
    27 ops = &jump_ops; in mips__associate_ins_ops()
    31 arch__associate_ins_ops(arch, name, ops); in mips__associate_ins_ops()
    33 return ops; in mips__associate_ins_ops()
|
/tools/perf/arch/powerpc/util/skip-callchain-idx.c
    50 Dwarf_Op *ops = &dummy; in check_return_reg() local
    54 result = dwarf_frame_register(frame, ra_regno, ops_mem, &ops, &nops); in check_return_reg()
    65 if ((nops != 0 || ops != NULL) && in check_return_reg()
    66 !(nops == 1 && ops[0].atom == DW_OP_regx && in check_return_reg()
    67 ops[0].number2 == 0 && ops[0].offset == 0)) in check_return_reg()
    74 result = dwarf_frame_cfa(frame, &ops, &nops); in check_return_reg()
    84 if (nops == 1 && ops[0].atom == DW_OP_bregx && ops[0].number == 1 && in check_return_reg()
    85 ops[0].number2 == 0) in check_return_reg()
|
/tools/perf/arch/arm/annotate/instructions.c
    16 struct ins_ops *ops; in arm__associate_instruction_ops() local
    20 ops = &call_ops; in arm__associate_instruction_ops()
    22 ops = &jump_ops; in arm__associate_instruction_ops()
    26 arch__associate_ins_ops(arch, name, ops); in arm__associate_instruction_ops()
    27 return ops; in arm__associate_instruction_ops()
|
/tools/testing/cxl/mock_acpi.c
    14 struct cxl_mock_ops *ops = get_cxl_mock_ops(&index); in to_cxl_host_bridge() local
    16 if (ops && ops->is_mock_bridge(dev)) { in to_cxl_host_bridge()
|
/tools/perf/arch/powerpc/annotate/instructions.c
    7 struct ins_ops *ops; in powerpc__associate_instruction_ops() local
    19 ops = &jump_ops; in powerpc__associate_instruction_ops()
    39 ops = &call_ops; in powerpc__associate_instruction_ops()
    46 ops = &ret_ops; in powerpc__associate_instruction_ops()
    48 arch__associate_ins_ops(arch, name, ops); in powerpc__associate_instruction_ops()
    49 return ops; in powerpc__associate_instruction_ops()
    264 struct annotated_op_loc *src = &loc.ops[INSN_OP_SOURCE]; in update_insn_state_powerpc()
    265 struct annotated_op_loc *dst = &loc.ops[INSN_OP_TARGET]; in update_insn_state_powerpc()
|
/tools/perf/bench/futex-hash.c
    48 unsigned long ops; member
    78 unsigned long ops = w->ops; /* avoid cacheline bouncing */ in workerfn() local
    88 for (i = 0; i < params.nfutexes; i++, ops++) { in workerfn()
    102 w->ops = ops; in workerfn()
    234 worker[i].ops / bench__runtime.tv_sec : 0; in bench_futex_hash()
|
/tools/perf/bench/futex-lock-pi.c
    31 unsigned long ops; member
    88 unsigned long ops = w->ops; in workerfn()  local
    117 ops++; /* account for thread's share of work */ in workerfn()
    120 w->ops = ops; in workerfn()
    238 worker[i].ops / bench__runtime.tv_sec : 0; in bench_futex_lock_pi()
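Both futex benchmarks account work the same way: each worker keeps its own ops counter, bumps a local copy inside the hot loop (the "avoid cacheline bouncing" hit), writes the total back once when it finishes, and the main thread divides per-worker totals by the runtime. A minimal pthread rendition of that accounting, with no futexes and a fixed dummy runtime:

/* Build with: gcc -pthread ops_counter.c */
#include <pthread.h>
#include <stdio.h>

#define NR_WORKERS 4
#define NR_LOOPS   1000000UL

struct worker {
        unsigned long ops;      /* total operations done by this thread */
};

static struct worker workers[NR_WORKERS];

static void *workerfn(void *arg)
{
        struct worker *w = arg;
        unsigned long ops = w->ops;     /* work on a local copy, not shared memory */

        for (unsigned long i = 0; i < NR_LOOPS; i++)
                ops++;                  /* stands in for one benchmark operation */

        w->ops = ops;                   /* publish the total once, at the end */
        return NULL;
}

int main(void)
{
        pthread_t threads[NR_WORKERS];
        unsigned long total = 0;
        unsigned long runtime_secs = 1; /* a real bench measures elapsed time */

        for (int i = 0; i < NR_WORKERS; i++)
                pthread_create(&threads[i], NULL, workerfn, &workers[i]);
        for (int i = 0; i < NR_WORKERS; i++) {
                pthread_join(threads[i], NULL);
                total += workers[i].ops;
        }
        printf("avg ops/sec per worker: %lu\n",
               total / NR_WORKERS / runtime_secs);
        return 0;
}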
|
/tools/lib/perf/evlist.c
    533 map = ops->get(evlist, overwrite, idx); in mmap_per_evsel()
    565 if (ops->idx) in mmap_per_evsel()
    566 ops->idx(evlist, evsel, mp, idx); in mmap_per_evsel()
    570 if (ops->mmap(map, mp, *output, evlist_cpu) < 0) in mmap_per_evsel()
    663 if (mmap_per_evsel(evlist, ops, cpu, mp, cpu, in mmap_per_cpu()
    696 struct perf_evlist_mmap_ops *ops, in perf_evlist__mmap_ops() argument
    702 if (!ops || !ops->get || !ops->mmap) in perf_evlist__mmap_ops()
    720 return mmap_per_thread(evlist, ops, mp); in perf_evlist__mmap_ops()
    722 return mmap_per_cpu(evlist, ops, mp); in perf_evlist__mmap_ops()
    728 struct perf_evlist_mmap_ops ops = { in perf_evlist__mmap() local
    [all …]
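perf_evlist__mmap_ops() rejects callers that omit the mandatory get and mmap callbacks up front, while the optional idx hook is checked at each call site. A short sketch of that required/optional split, with a simplified ops struct rather than libperf's real one:

#include <errno.h>
#include <stdio.h>

struct mmap_ops {
        int  (*get)(int idx);           /* required */
        int  (*mmap)(int idx);          /* required */
        void (*idx)(int idx);           /* optional notification hook */
};

static int do_mmap(const struct mmap_ops *ops, int nr_maps)
{
        /* validate the mandatory callbacks once, before doing any work */
        if (!ops || !ops->get || !ops->mmap)
                return -EINVAL;

        for (int i = 0; i < nr_maps; i++) {
                if (ops->get(i) < 0)
                        return -ENOMEM;
                if (ops->idx)           /* optional hook guarded at the call site */
                        ops->idx(i);
                if (ops->mmap(i) < 0)
                        return -EINVAL;
        }
        return 0;
}

static int demo_get(int idx)  { (void)idx; return 0; }
static int demo_mmap(int idx) { printf("mmap ring %d\n", idx); return 0; }

int main(void)
{
        struct mmap_ops ops = { .get = demo_get, .mmap = demo_mmap };

        return do_mmap(&ops, 2) ? 1 : 0;
}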
|
/tools/testing/nvdimm/test/iomap.c
    44 struct iomap_ops *ops; in __get_nfit_res() local
    46 ops = list_first_or_null_rcu(&iomap_head, typeof(*ops), list); in __get_nfit_res()
    47 if (ops) in __get_nfit_res()
    48 return ops->nfit_test_lookup(resource); in __get_nfit_res()
    385 struct iomap_ops *ops; in __wrap_acpi_evaluate_dsm() local
    388 ops = list_first_or_null_rcu(&iomap_head, typeof(*ops), list); in __wrap_acpi_evaluate_dsm()
    389 if (ops) in __wrap_acpi_evaluate_dsm()
    390 obj = ops->evaluate_dsm(handle, guid, rev, func, argv4); in __wrap_acpi_evaluate_dsm()
|