| /qemu/ebpf/ |
| ebpf_rss.c |
    32  if (ctx != NULL) {  in ebpf_rss_init()
    33  ctx->obj = NULL;  in ebpf_rss_init()
    34  ctx->program_fd = -1;  in ebpf_rss_init()
    47  return ctx != NULL && (ctx->obj != NULL || ctx->program_fd != -1);  in ebpf_rss_is_loaded()
    128  ctx->obj = rss_bpf_ctx;  in ebpf_rss_load()
    150  ctx->obj = NULL;  in ebpf_rss_load()
    151  ctx->program_fd = -1;  in ebpf_rss_load()
    290  ebpf_rss_munmap(ctx);  in ebpf_rss_unload()
    292  if (ctx->obj) {  in ebpf_rss_unload()
    301  ctx->obj = NULL;  in ebpf_rss_unload()
    [all …]
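The hits sketch a simple handle lifecycle: ebpf_rss_init() clears the context, ebpf_rss_is_loaded() reports whether either handle is live, and ebpf_rss_load()/ebpf_rss_unload() set and clear them. A minimal sketch of that pattern, using a simplified stand-in struct rather than QEMU's real context type:

    #include <stdbool.h>
    #include <stddef.h>

    /* Simplified stand-in for the RSS eBPF context; only the two handles
     * that the hits above touch. */
    struct rss_ctx {
        void *obj;        /* loaded BPF object, NULL while unloaded */
        int   program_fd; /* program descriptor, -1 while unloaded  */
    };

    static void rss_init(struct rss_ctx *ctx)
    {
        if (ctx != NULL) {
            ctx->obj = NULL;
            ctx->program_fd = -1;
        }
    }

    static bool rss_is_loaded(const struct rss_ctx *ctx)
    {
        /* loaded as soon as either handle is live */
        return ctx != NULL && (ctx->obj != NULL || ctx->program_fd != -1);
    }

    static void rss_unload(struct rss_ctx *ctx)
    {
        if (!rss_is_loaded(ctx)) {
            return;
        }
        /* release the object / close the fd here, then reset the handles */
        ctx->obj = NULL;
        ctx->program_fd = -1;
    }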
|
| /qemu/util/ |
| async.c |
    75  AioContext *ctx = bh->ctx;  in aio_bh_enqueue() local
    135  .ctx = ctx,  in aio_bh_schedule_oneshot_full()
    149  .ctx = ctx,  in aio_bh_new_full()
    376  laio_detach_aio_context(ctx->linux_aio, ctx);  in aio_ctx_finalize()
    441  ctx->thread_pool = thread_pool_new(ctx);  in aio_get_thread_pool()
    452  laio_attach_aio_context(ctx->linux_aio, ctx);  in aio_setup_linux_aio()
    477  luring_attach_aio_context(ctx->linux_io_uring, ctx);  in aio_setup_linux_io_uring()
    593  ctx->co_schedule_bh = aio_bh_new(ctx, co_schedule_bh_cb, ctx);  in aio_context_new()
    596  aio_set_event_notifier(ctx, &ctx->notifier,  in aio_context_new()
    694  ctx = qatomic_read(&co->ctx);  in aio_co_wake()
    [all …]
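A pattern worth noting in these hits: each bottom half stores a back-pointer to its AioContext when it is created (the `.ctx = ctx` initializers), so aio_bh_enqueue() can later recover the context from the BH alone. A rough sketch of that idea with made-up stand-in types, not QEMU's AioContext/QEMUBH:

    #include <stdlib.h>

    /* Stand-in types; QEMU's AioContext and QEMUBH carry much more state. */
    typedef struct AioCtx AioCtx;

    typedef struct BH {
        AioCtx *ctx;               /* back-pointer set when the BH is created */
        void  (*cb)(void *opaque);
        void   *opaque;
        struct BH *next;           /* intrusive list of pending BHs */
    } BH;

    struct AioCtx {
        BH *bh_list;               /* pending bottom halves */
    };

    /* mirror of the ".ctx = ctx" designated-initializer pattern in the hits */
    static BH *bh_new(AioCtx *ctx, void (*cb)(void *), void *opaque)
    {
        BH *bh = malloc(sizeof(*bh));
        if (!bh) {
            return NULL;
        }
        *bh = (BH){ .ctx = ctx, .cb = cb, .opaque = opaque };
        return bh;
    }

    /* the enqueue side only needs the BH: it recovers the context from it */
    static void bh_enqueue(BH *bh)
    {
        AioCtx *ctx = bh->ctx;
        bh->next = ctx->bh_list;
        ctx->bh_list = bh;
        /* a real implementation would also wake the event loop here */
    }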
|
| aio-posix.c |
    170  ctx->fdmon_ops->update(ctx, node, new_node);  in aio_set_fd_handler()
    175  aio_notify(ctx);  in aio_set_fd_handler()
    589  if (max_ns && !ctx->fdmon_ops->need_wait(ctx)) {  in try_poll_mode()
    659  if (timeout || ctx->fdmon_ops->need_wait(ctx)) {  in aio_poll()
    671  ctx->fdmon_ops->wait(ctx, &ready_list, timeout);  in aio_poll()
    693  ctx->poll_ns /= ctx->poll_shrink;  in aio_poll()
    698  trace_poll_shrink(ctx, old, ctx->poll_ns);  in aio_poll()
    699  } else if (ctx->poll_ns < ctx->poll_max_ns &&  in aio_poll()
    715  if (ctx->poll_ns > ctx->poll_max_ns) {  in aio_poll()
    716  ctx->poll_ns = ctx->poll_max_ns;  in aio_poll()
    [all …]
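The aio_poll() hits outline adaptive busy-polling: the poll window is divided by poll_shrink when a polling pass found no work, grown while it is still below poll_max_ns, and clamped so it never exceeds that maximum. A rough sketch of the adjustment logic; the field names follow the excerpt, but the grow policy and trigger conditions are guesses, not QEMU's exact heuristic:

    #include <stdbool.h>
    #include <stdint.h>

    /* Trimmed-down polling state; the real AioContext tracks more. */
    typedef struct {
        int64_t poll_ns;      /* current busy-poll window                 */
        int64_t poll_max_ns;  /* user-configured upper bound              */
        int64_t poll_grow;    /* multiplier applied when polling paid off */
        int64_t poll_shrink;  /* divisor applied when it did not          */
    } PollState;

    static void adjust_poll_window(PollState *s, bool made_progress)
    {
        if (!made_progress && s->poll_shrink) {
            s->poll_ns /= s->poll_shrink;      /* polling was wasted: back off */
        } else if (made_progress && s->poll_ns < s->poll_max_ns) {
            s->poll_ns = s->poll_ns ? s->poll_ns * s->poll_grow : 1;
        }
        if (s->poll_ns > s->poll_max_ns) {
            s->poll_ns = s->poll_max_ns;       /* never exceed the cap */
        }
    }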
|
| /qemu/target/riscv/ |
| translate.c |
    136  #define get_xl(ctx) ((ctx)->xl)  argument
    144  #define get_address_xl(ctx) ((ctx)->address_xl)  argument
    147  #define mxl_memop(ctx) ((get_xl(ctx) + 1) | MO_TE)  argument
    159  #define get_ol(ctx) ((ctx)->ol)  argument
    171  #define get_xl_max(ctx) ((ctx)->misa_mxl_max)  argument
    244  ctx->pc_save = ctx->base.pc_next + diff;  in gen_update_pc()
    711  if (ctx->frm == rm && ctx->frm_valid) {  in gen_set_rm_chkfrm()
    1226  ctx->pc_save = ctx->base.pc_first;  in riscv_tr_init_disas_context()
    1283  ctx->ol = ctx->xl;  in riscv_tr_translate_insn()
    1285  ctx->base.pc_next += ctx->cur_insn_len;  in riscv_tr_translate_insn()
    [all …]
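The first five hits are thin accessor macros over the disassembly context, and the riscv_tr_translate_insn() hits show the per-instruction operand length being reset from xl before each instruction (individual translators, e.g. in trans_xthead.c.inc below, may then narrow it). A compressed illustration over a cut-down context, not the real DisasContext:

    /* Cut-down context; the real RISC-V DisasContext has many more fields. */
    typedef struct {
        int xl;            /* effective XLEN for the hart                */
        int address_xl;    /* XLEN used for address computation          */
        int ol;            /* operand length for the current instruction */
        int misa_mxl_max;  /* widest XLEN the CPU supports               */
    } MiniCtx;

    /* accessor macros in the style of the hits above */
    #define get_xl(ctx)         ((ctx)->xl)
    #define get_address_xl(ctx) ((ctx)->address_xl)
    #define get_ol(ctx)         ((ctx)->ol)
    #define get_xl_max(ctx)     ((ctx)->misa_mxl_max)

    /* per-instruction reset, as in riscv_tr_translate_insn(): individual
     * translators may then narrow ol (e.g. to MXL_RV32) for one instruction */
    static void start_insn(MiniCtx *ctx)
    {
        ctx->ol = ctx->xl;
    }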
|
| /qemu/target/hppa/ |
| translate.c |
    730  install_iaq_entries(ctx, &ctx->iaq_f, &ctx->iaq_b);  in gen_excp()
    803  && (((ctx->iaoq_first + ctx->iaq_b.disp) ^ ctx->iaoq_first)  in use_nullify_skip()
    1881  ctx->iaq_j = iaqe_branchi(ctx, disp);  in do_dbranch()
    1895  ctx->iaq_n = &ctx->iaq_j;  in do_dbranch()
    1908  gen_goto_tb(ctx, 0, &ctx->iaq_b, &ctx->iaq_j);  in do_dbranch()
    2002  ctx->iaq_n = &ctx->iaq_j;  in do_ibranch()
    2015  install_iaq_entries(ctx, &ctx->iaq_b, &ctx->iaq_j);  in do_ibranch()
    2745  save_gpr(ctx, a->t, ctx->zero);  in trans_lci()
    3966  ctx->iaq_j.base = do_ibranch_priv(ctx, ctx->iaq_j.base);  in trans_be()
    4640  ctx->iaoq_first = ctx->base.pc_first + ctx->privilege;  in hppa_tr_init_disas_context()
    [all …]
|
| /qemu/target/riscv/insn_trans/ |
| trans_rvd.c.inc |
    28  if (ctx->cfg_ptr->ext_zdinx && (get_xl(ctx) == MXL_RV32) && \
    36  if (!has_ext(ctx, RVD) || !has_ext(ctx, RVC)) { \
    68  mark_fs_dirty(ctx);
    120  mark_fs_dirty(ctx);
    138  mark_fs_dirty(ctx);
    156  mark_fs_dirty(ctx);
    174  mark_fs_dirty(ctx);
    191  mark_fs_dirty(ctx);
    208  mark_fs_dirty(ctx);
    225  mark_fs_dirty(ctx);
    [all …]
|
| trans_rvzfh.c.inc |
    26  if (!ctx->cfg_ptr->ext_zhinx && !ctx->cfg_ptr->ext_zfh) { \
    32  if (!ctx->cfg_ptr->ext_zfhmin && !ctx->cfg_ptr->ext_zfbfmin) { \
    38  if (!(ctx->cfg_ptr->ext_zfhmin || ctx->cfg_ptr->ext_zhinxmin)) { \
    63  mark_fs_dirty(ctx);
    100  mark_fs_dirty(ctx);
    117  mark_fs_dirty(ctx);
    134  mark_fs_dirty(ctx);
    151  mark_fs_dirty(ctx);
    167  mark_fs_dirty(ctx);
    183  mark_fs_dirty(ctx);
    [all …]
|
| trans_rvzfa.c.inc |
    34  REQUIRE_ZFA(ctx);
    84  REQUIRE_ZFA(ctx);
    133  REQUIRE_ZFA(ctx);
    134  REQUIRE_ZFH(ctx);
    183  REQUIRE_ZFA(ctx);
    200  REQUIRE_ZFA(ctx);
    217  REQUIRE_ZFA(ctx);
    234  REQUIRE_ZFA(ctx);
    251  REQUIRE_ZFA(ctx);
    252  REQUIRE_ZFH(ctx);
    [all …]
|
| trans_xthead.c.inc |
    152  REQUIRE_64BIT(ctx);
    153  ctx->ol = MXL_RV32;
    238  REQUIRE_64BIT(ctx);
    357  mark_fs_dirty(ctx);
    446  REQUIRE_32BIT(ctx);
    455  mark_fs_dirty(ctx);
    462  REQUIRE_32BIT(ctx);
    474  mark_fs_dirty(ctx);
    515  ctx->ol = MXL_RV32;
    523  REQUIRE_64BIT(ctx);
    [all …]
|
| trans_rvf.c.inc |
    23  ctx->virt_inst_excp = ctx->virt_enabled && ctx->cfg_ptr->ext_zfinx; \
    36  if (!has_ext(ctx, RVF) || !has_ext(ctx, RVC)) { \
    61  mark_fs_dirty(ctx);
    108  mark_fs_dirty(ctx);
    125  mark_fs_dirty(ctx);
    142  mark_fs_dirty(ctx);
    159  mark_fs_dirty(ctx);
    175  mark_fs_dirty(ctx);
    191  mark_fs_dirty(ctx);
    207  mark_fs_dirty(ctx);
    [all …]
|
| trans_rvk.c.inc |
    65  REQUIRE_32BIT(ctx);
    66  REQUIRE_ZKNE(ctx);
    72  REQUIRE_32BIT(ctx);
    73  REQUIRE_ZKNE(ctx);
    80  REQUIRE_ZKND(ctx);
    87  REQUIRE_ZKND(ctx);
    94  REQUIRE_ZKNE(ctx);
    101  REQUIRE_ZKNE(ctx);
    108  REQUIRE_ZKND(ctx);
    115  REQUIRE_ZKND(ctx);
    [all …]
|
| trans_rvb.c.inc |
    71  REQUIRE_ZBB(ctx);
    87  REQUIRE_ZBB(ctx);
    93  REQUIRE_ZBB(ctx);
    117  REQUIRE_ZBB(ctx);
    123  REQUIRE_ZBB(ctx);
    129  REQUIRE_ZBB(ctx);
    135  REQUIRE_ZBB(ctx);
    141  REQUIRE_ZBB(ctx);
    147  REQUIRE_ZBB(ctx);
    167  REQUIRE_ZBS(ctx);
    [all …]
|
| trans_rvbf16.c.inc |
    40  REQUIRE_ZFBFMIN(ctx);
    48  mark_fs_dirty(ctx);
    55  REQUIRE_ZFBFMIN(ctx);
    63  mark_fs_dirty(ctx);
    72  if (opfv_narrow_check(ctx, a) && (ctx->sew == MO_16)) {
    81  tcg_gen_gvec_3_ptr(vreg_ofs(ctx, a->rd), vreg_ofs(ctx, 0),
    97  if (opfv_widen_check(ctx, a) && (ctx->sew == MO_16)) {
    106  tcg_gen_gvec_3_ptr(vreg_ofs(ctx, a->rd), vreg_ofs(ctx, 0),
    122  if (require_rvv(ctx) && vext_check_isa_ill(ctx) && (ctx->sew == MO_16) &&
    132  tcg_gen_gvec_4_ptr(vreg_ofs(ctx, a->rd), vreg_ofs(ctx, 0),
    [all …]
|
| trans_rva.c.inc |
    22  if (!ctx->cfg_ptr->ext_zaamo && !has_ext(ctx, RVA)) { \
    28  if (!ctx->cfg_ptr->ext_zalrsc && !has_ext(ctx, RVA)) { \
    167  REQUIRE_64BIT(ctx);
    174  REQUIRE_64BIT(ctx);
    181  REQUIRE_64BIT(ctx);
    188  REQUIRE_64BIT(ctx);
    195  REQUIRE_64BIT(ctx);
    202  REQUIRE_64BIT(ctx);
    209  REQUIRE_64BIT(ctx);
    216  REQUIRE_64BIT(ctx);
    [all …]
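The first two hits are the bodies of the extension-gate macros used by the atomic translators: when neither the Zaamo/Zalrsc sub-extension nor full RVA is enabled, the trans_* function returns false early so the decoder treats the encoding as illegal. A hand-wavy reconstruction with stand-in types and names (REQUIRE_AMO and trans_amo_op are illustrative, not the identifiers in the file):

    #include <stdbool.h>

    /* Stand-in types; cfg_ptr and the extension flags mirror the hits above
     * but are simplified. */
    typedef struct { bool ext_zaamo; bool ext_zalrsc; } MiniCfg;
    typedef struct { const MiniCfg *cfg_ptr; bool has_rva; } MiniCtx;

    /* Gate macro: leave the trans_* function early when neither the
     * sub-extension nor full RVA is available. */
    #define REQUIRE_AMO(ctx) do {                            \
        if (!(ctx)->cfg_ptr->ext_zaamo && !(ctx)->has_rva) { \
            return false;                                    \
        }                                                    \
    } while (0)

    /* hypothetical translator showing how the gate is used */
    static bool trans_amo_op(MiniCtx *ctx)
    {
        REQUIRE_AMO(ctx);
        /* ... emit the atomic memory operation ... */
        return true;
    }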
|
| trans_rvzabha.c.inc |
    27  REQUIRE_ZABHA(ctx);
    33  REQUIRE_ZABHA(ctx);
    39  REQUIRE_ZABHA(ctx);
    45  REQUIRE_ZABHA(ctx);
    51  REQUIRE_ZABHA(ctx);
    57  REQUIRE_ZABHA(ctx);
    63  REQUIRE_ZABHA(ctx);
    69  REQUIRE_ZABHA(ctx);
    75  REQUIRE_ZABHA(ctx);
    81  REQUIRE_ZABHA(ctx);
    [all …]
|
| trans_rvi.c.inc |
    109  if (!has_ext(ctx, RVC) && !ctx->cfg_ptr->ext_zca) {
    117  gen_pc_plus_diff(succ_pc, ctx, ctx->cur_insn_len);
    234  gen_goto_tb(ctx, 1, ctx->cur_insn_len);
    239  if (!has_ext(ctx, RVC) && !ctx->cfg_ptr->ext_zca &&
    859  gen_update_pc(ctx, ctx->cur_insn_len);
    883  gen_update_pc(ctx, ctx->cur_insn_len);
    894  gen_update_pc(ctx, ctx->cur_insn_len);
    1045  return do_csrrw(ctx, a->rd, a->csr, ctx->zero, mask);
    1083  return do_csrw_i128(ctx, a->csr, src, ctx->zero);
    1134  return do_csrrw(ctx, a->rd, a->csr, ctx->zero, mask);
    [all …]
|
| /qemu/target/mips/tcg/ |
| mips16e_translate.c.inc |
    136  tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, mo_endian(ctx) | MO_UL |
    184  tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, mo_endian(ctx) | MO_UL |
    190  tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, mo_endian(ctx) | MO_UL |
    196  tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, mo_endian(ctx) | MO_UL |
    202  tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, mo_endian(ctx) | MO_UL |
    460  ctx->opcode = (ctx->opcode << 16) | extend;
    680  gen_addiupc(ctx, rx, ((uint8_t) ctx->opcode) << 2, 0, 0);
    862  gen_ld(ctx, OPC_LW, rx, 29, ((uint8_t)ctx->opcode) << 2);
    874  gen_ld(ctx, OPC_LWPC, rx, 0, ((uint8_t)ctx->opcode) << 2);
    890  gen_st(ctx, OPC_SW, rx, 29, ((uint8_t)ctx->opcode) << 2);
    [all …]
|
| nanomips_translate.c.inc |
    2293  ctx->btarget = addr_add(ctx, ctx->base.pc_next + 4, offset);
    2371  gen_goto_tb(ctx, 0, ctx->btarget);
    2378  gen_goto_tb(ctx, 1, ctx->btarget);
    2381  gen_goto_tb(ctx, 0, ctx->base.pc_next + 4);
    2428  ctx->btarget = addr_add(ctx, ctx->base.pc_next + 4, offset);
    2440  ctx->btarget = addr_add(ctx, ctx->base.pc_next + 4, offset);
    2443  ctx->btarget = addr_add(ctx, ctx->base.pc_next + 4, offset);
    2450  ctx->btarget = addr_add(ctx, ctx->base.pc_next + 4, offset);
    2469  gen_goto_tb(ctx, 0, ctx->btarget);
    2543  gen_goto_tb(ctx, 1, ctx->btarget);
    [all …]
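These hits show the usual two-step compact-branch handling: decode records the target as base.pc_next + 4 + offset in ctx->btarget, and the exit path later chains either to that recorded target or to the fall-through address via gen_goto_tb(). A simplified sketch of the bookkeeping only, with the TCG emission reduced to a stub and the taken/not-taken decision shown as a plain C branch for clarity:

    #include <stdint.h>

    /* Simplified context; only the fields needed for branch bookkeeping. */
    typedef struct {
        uint64_t pc_next;   /* pc of the branch instruction being translated */
        uint64_t btarget;   /* recorded branch target                        */
    } MiniCtx;

    /* stub standing in for gen_goto_tb(); a real translator emits TCG here */
    static void goto_tb(MiniCtx *ctx, int slot, uint64_t dest)
    {
        (void)ctx; (void)slot; (void)dest;
    }

    /* decode step: remember where the branch goes */
    static void record_branch(MiniCtx *ctx, int32_t offset)
    {
        ctx->btarget = ctx->pc_next + 4 + offset;
    }

    /* exit path: chain to the taken target or to the fall-through address */
    static void emit_branch_exit(MiniCtx *ctx, int taken)
    {
        if (taken) {
            goto_tb(ctx, 0, ctx->btarget);
        } else {
            goto_tb(ctx, 1, ctx->pc_next + 4);
        }
    }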
|
| translate.c |
    1286  ctx->saved_hflags = ctx->hflags;  in save_cpu_state()
    1301  ctx->saved_hflags = ctx->hflags;  in restore_cpu_state()
    3999  gen_lxl(ctx, t1, t0, ctx->mem_idx, mo_endian(ctx) | MO_UL);  in gen_loongson_lswc2()
    4010  gen_lxr(ctx, t1, t0, ctx->mem_idx, mo_endian(ctx) | MO_UL);  in gen_loongson_lswc2()
    4020  gen_lxl(ctx, t1, t0, ctx->mem_idx, mo_endian(ctx) | MO_UQ);  in gen_loongson_lswc2()
    4028  gen_lxr(ctx, t1, t0, ctx->mem_idx, mo_endian(ctx) | MO_UQ);  in gen_loongson_lswc2()
    11012  ctx->btarget = addr_add(ctx, ctx->base.pc_next + 4, offset);  in gen_compute_compact_branch()
    11023  ctx->btarget = addr_add(ctx, ctx->base.pc_next + 4, offset);  in gen_compute_compact_branch()
    11035  ctx->btarget = addr_add(ctx, ctx->base.pc_next + 4, offset);  in gen_compute_compact_branch()
    11039  ctx->btarget = addr_add(ctx, ctx->base.pc_next + 4, offset);  in gen_compute_compact_branch()
    [all …]
|
| /qemu/target/rx/ |
| disas.c |
    42  ctx->dis->read_memory_func(addr++, &ctx->bytes[i - 1], 1, ctx->dis);  in decode_load_bytes()
    46  ctx->len = n;  in decode_load_bytes()
    61  ctx->dis->read_memory_func(addr, ctx->bytes + len, 1, ctx->dis);  in li()
    67  ctx->dis->read_memory_func(addr, ctx->bytes + len, 2, ctx->dis);  in li()
    73  ctx->dis->read_memory_func(addr, ctx->bytes + len, 3, ctx->dis);  in li()
    79  ctx->dis->read_memory_func(addr, ctx->bytes + len, 4, ctx->dis);  in li()
    110  ctx->dis->fprintf_func(ctx->dis->stream, "%02x ", ctx->bytes[i]);  in dump_bytes()
    155  ctx->dis->read_memory_func(addr, ctx->bytes + len, 1, ctx->dis);  in rx_index_addr()
    162  ctx->dis->read_memory_func(addr, ctx->bytes + len, 2, ctx->dis);  in rx_index_addr()
    1431  ctx.pc = ctx.addr = addr;  in print_insn_rx()
    [all …]
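All of these hits funnel through one idea: the RX disassembler buffers instruction bytes in its context by pulling them through the host disassembler's read-memory callback before decoding operands or dumping hex. A stripped-down sketch of that fetch loop; the types are stand-ins rather than the binutils-style disassemble_info used in the real file:

    #include <stdint.h>

    /* Stand-in context: an address cursor, a small byte buffer and a
     * read callback, loosely mirroring what the hits above rely on. */
    typedef struct MiniDisasCtx MiniDisasCtx;
    struct MiniDisasCtx {
        uint32_t addr;       /* next address to fetch from           */
        uint8_t  bytes[8];   /* bytes collected for the current insn */
        int      len;        /* how many of them are valid           */
        int    (*read_memory)(uint32_t addr, uint8_t *buf, int n,
                              MiniDisasCtx *ctx);
    };

    /* Fill ctx->bytes up to n bytes, one byte at a time, advancing addr. */
    static int load_bytes(MiniDisasCtx *ctx, int n)
    {
        if (n > (int)sizeof(ctx->bytes)) {
            return -1;                           /* would overflow the buffer */
        }
        while (ctx->len < n) {
            if (ctx->read_memory(ctx->addr, &ctx->bytes[ctx->len], 1, ctx) != 0) {
                return -1;                       /* fetch failed */
            }
            ctx->addr++;
            ctx->len++;
        }
        return 0;
    }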
|
| /qemu/target/hexagon/ |
| translate.c |
    88  ctx->future_vregs_num[ctx->future_vregs_idx + i] = regnum++;  in ctx_future_vreg_off()
    110  ctx->tmp_vregs_num[ctx->tmp_vregs_idx + i] = regnum++;  in ctx_tmp_vreg_off()
    164  gen_goto_tb(ctx, 0, ctx->branch_dest, true);  in gen_end_tb()
    166  gen_goto_tb(ctx, 1, ctx->next_PC, false);  in gen_end_tb()
    168  gen_goto_tb(ctx, 0, ctx->branch_dest, true);  in gen_end_tb()
    179  gen_goto_tb(ctx, 0, ctx->base.tb->pc, true);  in gen_end_tb()
    181  gen_goto_tb(ctx, 1, ctx->next_PC, false);  in gen_end_tb()
    383  if (ctx->read_after_write || ctx->has_hvx_overlap) {  in need_commit()
    421  ctx->need_commit = need_commit(ctx);  in analyze_packet()
    591  ctx->insn->generate(ctx);  in gen_insn()
    [all …]
|
| translate.h |
    86  ctx->preg_log[ctx->preg_log_idx] = pnum;  in ctx_log_pred_write()
    87  ctx->preg_log_idx++;  in ctx_log_pred_write()
    95  ctx->read_after_write = true;  in ctx_log_pred_read()
    113  ctx->reg_log[ctx->reg_log_idx] = rnum;  in ctx_log_reg_write()
    114  ctx->reg_log_idx++;  in ctx_log_reg_write()
    144  ctx_log_reg_read(ctx, rnum);  in ctx_log_reg_read_pair()
    145  ctx_log_reg_read(ctx, rnum + 1);  in ctx_log_reg_read_pair()
    174  ctx->vreg_log[ctx->vreg_log_idx] = rnum;  in ctx_log_vreg_write()
    175  ctx->vreg_log_idx++;  in ctx_log_vreg_write()
    252  ctx->qreg_log[ctx->qreg_log_idx] = rnum;  in ctx_log_qreg_write()
    [all …]
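These inline helpers all share one append-to-log idiom: every predicate, GPR, HVX vector or Q register written by the packet is pushed onto a small per-packet array so the commit phase knows what to flush, and a read that follows a write in the same packet sets read_after_write, which need_commit() in translate.c then consults. A compact sketch of the idiom with made-up sizes and a single log instead of the four separate ones:

    #include <stdbool.h>

    /* Made-up size and a trimmed context; the real Hexagon DisasContext
     * keeps separate logs for predicates, GPRs, HVX vector and Q registers. */
    #define LOG_MAX 32

    typedef struct {
        int  reg_log[LOG_MAX];   /* callers guarantee this never overflows */
        int  reg_log_idx;
        bool read_after_write;
    } MiniCtx;

    /* append-to-log idiom, as in ctx_log_reg_write() */
    static void log_reg_write(MiniCtx *ctx, int rnum)
    {
        ctx->reg_log[ctx->reg_log_idx] = rnum;
        ctx->reg_log_idx++;
    }

    /* pair variant mirrors ctx_log_reg_read_pair(): two consecutive entries */
    static void log_reg_write_pair(MiniCtx *ctx, int rnum)
    {
        log_reg_write(ctx, rnum);
        log_reg_write(ctx, rnum + 1);
    }

    /* a read after a write inside the same packet forces a full commit */
    static void log_reg_read(MiniCtx *ctx, int rnum, bool written_earlier)
    {
        (void)rnum;
        if (written_earlier) {
            ctx->read_after_write = true;
        }
    }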
|
| /qemu/target/ppc/ |
| spr_common.h |
    81  void spr_noaccess(DisasContext *ctx, int gprn, int sprn);
    91  void spr_write_PMC(DisasContext *ctx, int sprn, int gprn);
    93  void spr_read_xer(DisasContext *ctx, int gprn, int sprn);
    94  void spr_write_xer(DisasContext *ctx, int sprn, int gprn);
    95  void spr_read_lr(DisasContext *ctx, int gprn, int sprn);
    96  void spr_write_lr(DisasContext *ctx, int sprn, int gprn);
    97  void spr_read_ctr(DisasContext *ctx, int gprn, int sprn);
    102  void spr_read_PMC(DisasContext *ctx, int gprn, int sprn);
    105  void spr_read_tbl(DisasContext *ctx, int gprn, int sprn);
    106  void spr_read_tbu(DisasContext *ctx, int gprn, int sprn);
    [all …]
|
| translate.c |
    340  gen_exception_nip(ctx, excp, ctx->cia);  in gen_exception()
    1477  if (unlikely(ctx->pr || !ctx->hv)) {\
    1489  if (unlikely(ctx->pr || !ctx->hv || ctx->dr)) { \
    3669  gen_setlr(ctx, ctx->base.pc_next);  in gen_b()
    3707  gen_setlr(ctx, ctx->base.pc_next);  in gen_bcond()
    3782  gen_goto_tb(ctx, 0, ctx->cia + li);  in gen_bcond()
    3797  gen_goto_tb(ctx, 1, ctx->base.pc_next);  in gen_bcond()
    3971  gen_update_nip(ctx, ctx->cia);  in gen_scv()
    4211  gen_update_nip(ctx, ctx->base.pc_next);  in gen_mtmsrd()
    4903  gen_405_mulladd_insn(ctx, opc2, opc3, rA(ctx->opcode), rB(ctx->opcode), \
    [all …]
|
| /qemu/target/ppc/translate/ |
| spe-impl.c.inc |
    325  if (rD(ctx->opcode) == rA(ctx->opcode)) {
    622  gen_qemu_ld32u(ctx, cpu_gpr[rD(ctx->opcode)], addr);
    680  gen_qemu_ld16u(ctx, cpu_gpr[rD(ctx->opcode)], addr);
    687  gen_qemu_ld16s(ctx, cpu_gpr[rD(ctx->opcode)], addr);
    719  gen_qemu_st32(ctx, cpu_gprh[rS(ctx->opcode)], addr);
    721  gen_qemu_st32(ctx, cpu_gpr[rS(ctx->opcode)], addr);
    730  gen_qemu_st16(ctx, cpu_gprh[rS(ctx->opcode)], addr);
    735  gen_qemu_st16(ctx, cpu_gpr[rS(ctx->opcode)], addr);
    750  gen_qemu_st16(ctx, cpu_gprh[rS(ctx->opcode)], addr);
    752  gen_qemu_st16(ctx, cpu_gpr[rS(ctx->opcode)], addr);
    [all …]
|