/arch/parisc/math-emu/

dfcmp.c
    67   Set_status_cbit(Unordered(cond));    in dbl_fcmp()
    71   Set_status_cbit(Unordered(cond));    in dbl_fcmp()
    83   Set_status_cbit(Unordered(cond));    in dbl_fcmp()
    98   Set_status_cbit(Equal(cond));        in dbl_fcmp()
    102  Set_status_cbit(Lessthan(cond));     in dbl_fcmp()
    113  Set_status_cbit(Equal(cond));        in dbl_fcmp()
    120  Set_status_cbit(Lessthan(cond));     in dbl_fcmp()
    132  Set_status_cbit(Lessthan(cond));     in dbl_fcmp()
    136  Set_status_cbit(Greaterthan(cond));  in dbl_fcmp()
    147  Set_status_cbit(Lessthan(cond));     in dbl_fcmp()
    [all …]

sfcmp.c
    37   unsigned int cond, unsigned int *status)  in sgl_fcmp()  argument
    67   Set_status_cbit(Unordered(cond));    in sgl_fcmp()
    71   Set_status_cbit(Unordered(cond));    in sgl_fcmp()
    83   Set_status_cbit(Unordered(cond));    in sgl_fcmp()
    98   Set_status_cbit(Equal(cond));        in sgl_fcmp()
    102  Set_status_cbit(Lessthan(cond));     in sgl_fcmp()
    106  Set_status_cbit(Greaterthan(cond));  in sgl_fcmp()
    113  Set_status_cbit(Equal(cond));        in sgl_fcmp()
    120  Set_status_cbit(Lessthan(cond));     in sgl_fcmp()
    124  Set_status_cbit(Greaterthan(cond));  in sgl_fcmp()
    [all …]

float.h
    522  #define Unordered(cond) Unorderedbit(cond)
    523  #define Equal(cond) Equalbit(cond)
    524  #define Lessthan(cond) Lessthanbit(cond)
    525  #define Greaterthan(cond) Greaterthanbit(cond)
    526  #define Exception(cond) Exceptionbit(cond)
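
The dfcmp.c and sfcmp.c hits all funnel into the float.h condition-bit macros listed last: the emulator classifies a comparison into one of the four IEEE relations and sets the status C-bit only when the caller's condition selects that relation. The sketch below is an editor's illustration of that pattern, not the parisc code; the COND_* bit layout is made up, and the real emulator works on raw register images rather than host doubles.

```c
#include <math.h>
#include <stdbool.h>

/* Hypothetical condition bits; the real layout lives in float.h. */
#define COND_UNORDERED   (1u << 0)
#define COND_EQUAL       (1u << 1)
#define COND_LESSTHAN    (1u << 2)
#define COND_GREATERTHAN (1u << 3)

/* Decide whether the relation selected by `cond` holds for a and b. */
static bool fcmp_cond_holds(double a, double b, unsigned int cond)
{
    unsigned int relation;

    if (isnan(a) || isnan(b))
        relation = COND_UNORDERED;   /* any NaN compares unordered */
    else if (a == b)
        relation = COND_EQUAL;
    else if (a < b)
        relation = COND_LESSTHAN;
    else
        relation = COND_GREATERTHAN;

    /* The emulator would call Set_status_cbit() when this is true. */
    return (cond & relation) != 0;
}
```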
|
/arch/arm64/kvm/hyp/

aarch32.c
    51   int cond;  in kvm_condition_valid32()  local
    72   cond = kvm_vcpu_get_condition(vcpu);  in kvm_condition_valid32()
    73   if (cond == 0xE)  in kvm_condition_valid32()
    78   if (cond < 0) {  in kvm_condition_valid32()
    89   cond = (it >> 4);  in kvm_condition_valid32()
    94   if (!((cc_map[cond] >> cpsr_cond) & 1))  in kvm_condition_valid32()
    112  unsigned long itbits, cond;  in kvm_adjust_itstate()  local
    119  cond = (cpsr & 0xe000) >> 13;  in kvm_adjust_itstate()
    125  itbits = cond = 0;  in kvm_adjust_itstate()
    130  cpsr |= cond << 13;  in kvm_adjust_itstate()
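
The kvm_condition_valid32() lines show the table-driven ARM condition check: `cc_map[cond]` is a 16-bit mask indexed by the CPSR's NZCV flag nibble, and bit i says whether flag-nibble value i satisfies condition `cond`. A self-contained sketch of that lookup follows; the mask values are the standard architectural condition definitions, while the function name is illustrative.

```c
#include <stdbool.h>
#include <stdint.h>

/* Bit i of cc_map[cond] == 1  <=>  NZCV nibble value i passes `cond`. */
static const uint16_t cc_map[16] = {
    0xF0F0, /* EQ: Z set               */
    0x0F0F, /* NE: Z clear             */
    0xCCCC, /* CS: C set               */
    0x3333, /* CC: C clear             */
    0xFF00, /* MI: N set               */
    0x00FF, /* PL: N clear             */
    0xAAAA, /* VS: V set               */
    0x5555, /* VC: V clear             */
    0x0C0C, /* HI: C set and Z clear   */
    0xF3F3, /* LS: C clear or Z set    */
    0xAA55, /* GE: N == V              */
    0x55AA, /* LT: N != V              */
    0x0A05, /* GT: !Z and N == V       */
    0xF5FA, /* LE: Z or N != V         */
    0xFFFF, /* AL: always              */
    0x0000, /* NV: never (deprecated)  */
};

static bool condition_passes(uint32_t cpsr, unsigned int cond)
{
    unsigned int nzcv = cpsr >> 28;   /* N, Z, C, V live in bits 31..28 */

    return (cc_map[cond & 0xF] >> nzcv) & 1;
}
```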
|
/arch/parisc/include/asm/

alternative.h
    26  u16 cond;  /* see ALT_COND_XXX */  member
    36  #define ALTERNATIVE(cond, replacement) "!0:" \  argument
    40  ".hword 1, " __stringify(cond) " !" \
    47  #define ALTERNATIVE(from, to, cond, replacement)\  argument
    51  .hword (to - from)/4, cond ! \
    56  #define ALTERNATIVE_CODE(from, num_instructions, cond, new_instr_ptr)\  argument
    60  .hword -num_instructions, cond ! \
|
/arch/arm/include/asm/

assembler.h
    146  bl\cond trace_hardirqs_on
    198  asm_trace_hardirqs_on cond=eq
    489  .ifnc \cond,al
    491  itt \cond
    493  ittt \cond
    505  add\cond \ptr, #\rept * \inc
    638  ldr\cond \dst, =\imm
    640  movw\cond \dst, #:lower16:\imm
    641  movt\cond \dst, #:upper16:\imm
    683  mov_l \tmp, \sym, \cond
    [all …]

domain.h
    121  #define TUSERCOND(instr, cond) #instr "t" #cond  argument
    124  #define TUSERCOND(instr, cond) #instr #cond  argument
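
The two TUSERCOND() variants differ only in whether the unprivileged "t" suffix is spliced into the mnemonic; both rely on preprocessor stringizing plus the C rule that adjacent string literals concatenate. A minimal stand-alone demonstration (the macro name and example mnemonic are stand-ins, not kernel code):

```c
#include <stdio.h>

/* Stringize both arguments and let literal concatenation join them. */
#define TUSERCOND_DEMO(instr, cond) #instr "t" #cond

int main(void)
{
    /* "strb" "t" "eq" merges into "strbteq": a conditional,
     * unprivileged store-byte mnemonic ready for an asm template. */
    puts(TUSERCOND_DEMO(strb, eq));
    return 0;
}
```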
|
spinlock.h
    29  #define WFE(cond) __ALT_SMP_ASM( \  argument
    30  "it " cond "\n\t" \
    31  "wfe" cond ".n", \
    36  #define WFE(cond) __ALT_SMP_ASM("wfe" cond, "nop")  argument
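
WFE() turns a contended spin into a low-power wait-for-event that resumes when another CPU sends an event (as the unlock path does), and the __ALT_SMP_ASM wrapper swaps in a plain `nop` on UP builds. A hedged sketch of the bare primitive outside that machinery:

```c
/* Stand-in for the WFE() macro above: wait-for-event on ARM targets,
 * nothing elsewhere (mirroring the UP "nop" alternative). */
static inline void wait_for_event(void)
{
#if defined(__arm__) || defined(__aarch64__)
    __asm__ volatile("wfe" ::: "memory");
#endif
}
```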
|
vfpmacros.h
    11  .macro VFPFMRX, rd, sysreg, cond
    12  vmrs\cond \rd, \sysreg
    15  .macro VFPFMXR, sysreg, rd, cond
    16  vmsr\cond \sysreg, \rd
|
/arch/x86/include/asm/

paravirt_types.h
    414  alt, cond) \
    444  #define __PVOP_ALT_VCALL(op, alt, cond, ...) \  argument
    445  (void)____PVOP_ALT_CALL(, op, alt, cond, \
    454  (void)____PVOP_ALT_CALL(, op.func, alt, cond, \
    463  __PVOP_ALT_CALL(rettype, op, alt, cond)
    464  #define PVOP_ALT_VCALL0(op, alt, cond) \  argument
    465  __PVOP_ALT_VCALL(op, alt, cond)
    472  __PVOP_ALT_CALLEESAVE(rettype, op, alt, cond)
    473  #define PVOP_ALT_VCALLEE0(op, alt, cond) \  argument
    474  __PVOP_ALT_VCALLEESAVE(op, alt, cond)
    [all …]
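
Roughly speaking, the PVOP_ALT_* family wraps a paravirt operation in an ALTERNATIVE: the default is an indirect call through the ops table, but when the feature named by `cond` is present the call site is patched to the fixed `alt` code at boot. The sketch below only mimics that effect by picking a function pointer at init time; every name in it is made up, and a runtime pointer swap is a stand-in for the kernel's call-site patching, not a description of it.

```c
#include <stdbool.h>

/* Hypothetical ops table with a single operation. */
struct pv_ops_sketch {
    unsigned long (*read_cr2)(void);
};

static unsigned long hypervisor_read_cr2(void) { return 0; /* hypercall path */ }
static unsigned long native_read_cr2(void)     { return 0; /* direct register read */ }

static struct pv_ops_sketch pv_ops_sketch = {
    .read_cr2 = hypervisor_read_cr2,   /* default: indirect, paravirtualised */
};

/* The "cond" of PVOP_ALT_*: if the feature holds, use the alt implementation. */
static void pick_alternative(bool feature_present)
{
    if (feature_present)
        pv_ops_sketch.read_cr2 = native_read_cr2;
}
```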
|
/arch/parisc/kernel/

alternative.c
    49  u16 cond;  in apply_alternatives()  local
    54  cond = entry->cond;  in apply_alternatives()
    57  WARN_ON(!cond);  in apply_alternatives()
    59  if ((cond & ALT_COND_ALWAYS) == 0 && no_alternatives)  in apply_alternatives()
    63  index, cond, len, from, replacement);  in apply_alternatives()
    66  if ((cond & cond_check) == 0)  in apply_alternatives()
    84  index, cond, len, replacement, from, from);  in apply_alternatives()
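
apply_alternatives() walks a table of patch entries (the alternative.h struct with the `cond` member above) and rewrites the original instructions whenever an entry's condition matches the running machine. A loose, self-contained sketch of that loop; the entry layout, field names, and condition mask are illustrative rather than the parisc format.

```c
#include <stdint.h>
#include <string.h>

/* Hypothetical patch-table entry: where to patch, how much, and when. */
struct alt_entry {
    uint32_t *site;        /* instructions to overwrite        */
    unsigned int len;      /* number of instruction words      */
    uint16_t cond;         /* ALT_COND_*-style condition bits  */
    const uint32_t *repl;  /* replacement instructions         */
};

static void apply_alternatives_sketch(const struct alt_entry *tbl, size_t n,
                                      uint16_t cpu_cond_mask)
{
    for (size_t i = 0; i < n; i++) {
        const struct alt_entry *e = &tbl[i];

        /* Skip entries whose condition does not hold on this CPU. */
        if (!(e->cond & cpu_cond_mask))
            continue;

        memcpy(e->site, e->repl, e->len * sizeof(*e->site));
        /* A real kernel would flush the instruction cache here. */
    }
}
```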
|
/arch/loongarch/net/

bpf_jit.h
    181  static inline int invert_jmp_cond(u8 cond)  in invert_jmp_cond()  argument
    183  switch (cond) {  in invert_jmp_cond()
    209  static inline void cond_jmp_offset(struct jit_ctx *ctx, u8 cond, enum loongarch_gpr rj,  in cond_jmp_offset()  argument
    212  switch (cond) {  in cond_jmp_offset()
    257  static inline void cond_jmp_offs26(struct jit_ctx *ctx, u8 cond, enum loongarch_gpr rj,  in cond_jmp_offs26()  argument
    260  cond = invert_jmp_cond(cond);  in cond_jmp_offs26()
    261  cond_jmp_offset(ctx, cond, rj, rd, 2);  in cond_jmp_offs26()
    270  static inline int emit_cond_jmp(struct jit_ctx *ctx, u8 cond, enum loongarch_gpr rj,  in emit_cond_jmp()  argument
    285  cond_jmp_offs26(ctx, cond, rj, rd, jmp_offset);  in emit_cond_jmp()
    302  static inline int emit_tailcall_jmp(struct jit_ctx *ctx, u8 cond, enum loongarch_gpr rj,  in emit_tailcall_jmp()  argument
    [all …]
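
invert_jmp_cond() exists for the far-branch case: when the real target is out of a conditional branch's reach, cond_jmp_offs26() inverts the condition and emits a short conditional hop (offset 2) over a long-range unconditional jump. The sketch below shows only the inversion half, with a made-up condition enum; the powerpc PPC_BCC() entry further down plays the same trick with `cond ^ COND_CMP_TRUE`.

```c
/* Illustrative condition codes; the JIT uses arch-specific encodings. */
enum jit_cc { CC_EQ, CC_NE, CC_LT, CC_GE, CC_LE, CC_GT };

/* Return the condition that is true exactly when `cc` is false, so
 *     branch.cc      far_target
 * can be rewritten as
 *     branch.!cc     skip        ; short hop over the next insn
 *     jump           far_target  ; unconditional, long range
 *   skip:
 */
static enum jit_cc invert_cc(enum jit_cc cc)
{
    switch (cc) {
    case CC_EQ: return CC_NE;
    case CC_NE: return CC_EQ;
    case CC_LT: return CC_GE;
    case CC_GE: return CC_LT;
    case CC_LE: return CC_GT;
    case CC_GT: return CC_LE;
    }
    return cc;
}
```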
|
/arch/arm/lib/

memcpy.S
    29  .macro ldr1b ptr reg cond=al abort
    30  ldrb\cond \reg, [\ptr], #1
    41  .macro str1b ptr reg cond=al abort
    42  strb\cond \reg, [\ptr], #1

copy_from_user.S
    76  .macro ldr1b ptr reg cond=al abort
    77  ldrusr \reg, \ptr, 1, \cond, abort=\abort
    90  .macro str1b ptr reg cond=al abort
    91  strb\cond \reg, [\ptr], #1

copy_to_user.S
    48  .macro ldr1b ptr reg cond=al abort
    49  ldrb\cond \reg, [\ptr], #1
    89  .macro str1b ptr reg cond=al abort
    90  strusr \reg, \ptr, 1, \cond, abort=\abort
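
The three files above plug different ldr1b/str1b definitions into one shared copy template: memcpy.S uses plain loads and stores, while the uaccess variants substitute faulting user accesses on one side. A loose C analogue of that parameterisation, with function pointers standing in for the assembler macros (the real template also threads through an abort label for exception fixups):

```c
#include <stddef.h>
#include <stdint.h>

typedef uint8_t (*load1b_t)(const uint8_t *src);
typedef void    (*store1b_t)(uint8_t *dst, uint8_t v);

static uint8_t plain_load1b(const uint8_t *src)       { return *src; }
static void    plain_store1b(uint8_t *dst, uint8_t v) { *dst = v; }

/* One template, many copy flavours: swap the per-byte helpers. */
static void copy_template(uint8_t *dst, const uint8_t *src, size_t n,
                          load1b_t ld, store1b_t st)
{
    while (n--)
        st(dst++, ld(src++));
}
```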
|
/arch/arm/boot/compressed/

decompress.c
    16  # define Assert(cond,msg) {if(!(cond)) error(msg);}  argument
    23  # define Assert(cond,msg)  argument
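
This Assert() (and the identical copies in the alpha and nios2 boot code below) is the zlib-style debug assert: it is active only when DEBUG is defined and compiles away entirely otherwise. A stand-alone version of the same idiom; error() here is a stand-in for the decompressor's fatal-error handler.

```c
#include <stdio.h>
#include <stdlib.h>

static void error(const char *msg)
{
    fprintf(stderr, "fatal: %s\n", msg);
    exit(1);
}

#ifdef DEBUG
#  define Assert(cond, msg) do { if (!(cond)) error(msg); } while (0)
#else
#  define Assert(cond, msg) do { } while (0)   /* vanishes in release builds */
#endif

int main(void)
{
    Assert(1 + 1 == 2, "arithmetic is broken");
    return 0;
}
```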
|
/arch/powerpc/net/

bpf_jit.h
    44   #define PPC_BCC_SHORT(cond, dest) \  argument
    51   EMIT(PPC_INST_BRANCH_COND | (((cond) & 0x3ff) << 16) | (offset & 0xfffc)); \
    123  #define PPC_BCC(cond, dest) do { \  argument
    125  PPC_BCC_SHORT(cond, dest); \
    129  PPC_BCC_SHORT(cond ^ COND_CMP_TRUE, CTX_NIA(ctx) + 2*4); \
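
PPC_BCC() spells out the long-branch fallback: PPC_BCC_SHORT() emits a `bc` whose displacement field only reaches about ±32 KB, so when the destination is farther away the macro branches on the inverted condition (`cond ^ COND_CMP_TRUE`) over an unconditional `b`. A hedged, self-contained sketch of that decision; the two primary opcodes are the standard `bc`/`b` encodings, while the buffer, the invert bit, and the range check are illustrative.

```c
#include <stdint.h>

#define COND_INVERT 0x1                 /* hypothetical "flip the test" bit */

static uint32_t code[1024];
static unsigned int ninsns;

static void emit32(uint32_t insn) { code[ninsns++] = insn; }

/* bc: primary opcode 16, 14-bit signed byte displacement. */
static uint32_t bc_insn(unsigned int cond, int rel)
{
    return 0x40000000u | ((cond & 0x3ffu) << 16) | ((uint32_t)rel & 0xfffc);
}

/* b: primary opcode 18, 24-bit signed byte displacement. */
static uint32_t b_insn(int rel)
{
    return 0x48000000u | ((uint32_t)rel & 0x03fffffc);
}

/* Branch to `offset` bytes away, measured from the bc instruction. */
static void emit_cond_branch(unsigned int cond, int offset)
{
    if (offset >= -0x8000 && offset < 0x8000) {
        emit32(bc_insn(cond, offset));           /* short form fits         */
    } else {
        emit32(bc_insn(cond ^ COND_INVERT, 8));  /* hop over the next insn  */
        emit32(b_insn(offset - 4));              /* long unconditional jump */
    }
}
```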
|
/arch/alpha/boot/

misc.c
    65  # define Assert(cond,msg) {if(!(cond)) error(msg);}  argument
    72  # define Assert(cond,msg)  argument
|
/arch/nios2/boot/compressed/

misc.c
    55  # define Assert(cond, msg) {if (!(cond)) error(msg); }  argument
    62  # define Assert(cond, msg)  argument
|
/arch/arc/net/

bpf_jit_arcv2.c
    2536  u8 cond[JCC64_NR_OF_JMPS];  member
    2575  .cond = {CC_less_u, CC_great_u, CC_less_u}
    2716  if (cond >= ARC_CC_LAST)  in check_jcc_64()
    2741  switch (cond) {  in check_jmp_64()
    2809  const u8 *cc = arcv2_64_jccs.jmp[cond].cond;  in gen_jcc_64()
    2845  switch (cond) {  in gen_jmp_64()
    2909  if (cond >= ARC_CC_LAST)  in check_jmp_32()
    2919  if (cond == ARC_CC_AL)  in check_jmp_32()
    2949  if (cond >= ARC_CC_LAST) {  in gen_jmp_32()
    2958  if (cond != ARC_CC_AL) {  in gen_jmp_32()
    [all …]

bpf_jit.h
    148  bool check_jmp_32(u32 curr_off, u32 targ_off, u8 cond);
    149  bool check_jmp_64(u32 curr_off, u32 targ_off, u8 cond);
    150  u8 gen_jmp_32(u8 *buf, u8 rd, u8 rs, u8 cond, u32 c_off, u32 t_off);
    151  u8 gen_jmp_64(u8 *buf, u8 rd, u8 rs, u8 cond, u32 c_off, u32 t_off);
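
The `.cond = {CC_less_u, CC_great_u, CC_less_u}` initializer hints at how the 64-bit conditional jumps are lowered onto 32-bit registers: the high words are compared first and decide the branch unless they are equal, in which case the low words decide it unsigned. The same decision written in plain C, purely as an editor's illustration:

```c
#include <stdbool.h>
#include <stdint.h>

/* 64-bit unsigned "a < b" built from 32-bit halves, mirroring the
 * three-branch sequence a 32-bit JIT emits for such compares. */
static bool u64_less_than(uint32_t a_hi, uint32_t a_lo,
                          uint32_t b_hi, uint32_t b_lo)
{
    if (a_hi < b_hi)        /* high words already decide it        */
        return true;
    if (a_hi > b_hi)
        return false;
    return a_lo < b_lo;     /* equal high words: compare the lows  */
}
```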
|
/arch/parisc/net/

bpf_jit.h
    105  #define hppa_or_cond(reg1, reg2, cond, f, target) \  argument
    106  hppa_t6_insn(0x02, reg2, reg1, cond, f, 0x09, target)
    109  #define hppa_and_cond(reg1, reg2, cond, f, target) \  argument
    110  hppa_t6_insn(0x02, reg2, reg1, cond, f, 0x08, target)
    276  static inline int invert_bpf_cond(u8 cond)  in invert_bpf_cond()  argument
    278  switch (cond) {  in invert_bpf_cond()
|
/arch/arm/kernel/

phys2virt.S
    163  @ ADD | cond | 0 0 1 0 1 0 0 0 | Rn | Rd | imm12 |
    164  @ SUB | cond | 0 0 1 0 0 1 0 0 | Rn | Rd | imm12 |
    165  @ MOV | cond | 0 0 1 1 1 0 1 0 | Rn | Rd | imm12 |
    166  @ MVN | cond | 0 0 1 1 1 1 1 0 | Rn | Rd | imm12 |
    179  @ MOVW | cond | 0 0 1 1 0 0 0 0 | imm4 | Rd | imm12 |
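
These comments document the encodings that the runtime patching code in phys2virt.S rewrites: the condition and opcode fields stay untouched while the low immediate bits are replaced. A tiny hedged helper showing that kind of field splice on an ARM data-processing word:

```c
#include <stdint.h>

/* Keep cond/opcode/Rn/Rd, replace only the 12-bit immediate field.
 * Illustrative; a real patcher must also respect the rotate-encoded
 * immediate format inside those 12 bits. */
static uint32_t set_imm12(uint32_t insn, uint32_t imm12)
{
    return (insn & ~0xFFFu) | (imm12 & 0xFFFu);
}
```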
|
/arch/arm/mach-omap2/

common.h
    206  #define omap_test_timeout(cond, timeout, index) \  argument
    209  if (cond) \
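
omap_test_timeout() is the usual bounded-polling macro: evaluate `cond` up to `timeout` times with a small delay between attempts, leaving the iteration count in `index` so the caller can compare it against the timeout afterwards. A sketch of that shape; udelay() is stubbed out because the kernel primitive is not available here.

```c
/* Stand-in for the kernel's microsecond delay. */
static inline void udelay(unsigned long usecs) { (void)usecs; }

/* Poll until `cond` holds or `timeout` iterations have elapsed;
 * afterwards, index == timeout means the condition never came true. */
#define test_timeout(cond, timeout, index)                      \
    do {                                                        \
        for ((index) = 0; (index) < (timeout); (index)++) {     \
            if (cond)                                           \
                break;                                          \
            udelay(1);                                          \
        }                                                       \
    } while (0)
```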
|
/arch/arm/mach-bcm/

bcm63xx_pmb.c
    62  u32 shift, u32 mask, u32 cond)  in bpcm_wr_rd_mask()  argument
    76  } while (((*val >> shift) & mask) != cond);  in bpcm_wr_rd_mask()
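
bpcm_wr_rd_mask() ends in the loop shown at line 76: re-read a status register until the selected bit-field equals the expected value. The reduced form below keeps only that loop; a production driver would also bound it with a timeout, much like the omap macro above.

```c
#include <stdint.h>

/* Spin until (*reg >> shift) & mask equals `want`.  Illustrative only;
 * there is deliberately no timeout or error handling here. */
static void wait_for_field(volatile uint32_t *reg, uint32_t shift,
                           uint32_t mask, uint32_t want)
{
    uint32_t val;

    do {
        val = *reg;
    } while (((val >> shift) & mask) != want);
}
```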
|