| /arch/arm/kernel/ |
| perf_callchain.c |
    34   user_backtrace(struct frame_tail __user *tail,  in user_backtrace() argument
    40   if (!access_ok(tail, sizeof(buftail)))  in user_backtrace()
    44   err = __copy_from_user_inatomic(&buftail, tail, sizeof(buftail));  in user_backtrace()
    56   if (tail + 1 >= buftail.fp)  in user_backtrace()
    65   struct frame_tail __user *tail;  in perf_callchain_user() local
    72   tail = (struct frame_tail __user *)regs->ARM_fp - 1;  in perf_callchain_user()
    75   tail && !((unsigned long)tail & 0x3))  in perf_callchain_user()
    76   tail = user_backtrace(tail, entry);  in perf_callchain_user()
|
| /arch/arm64/kernel/ |
| stacktrace.c |
    524  if (!access_ok(tail, sizeof(buftail)))  in unwind_user_frame()
    543  if (tail >= buftail.fp)  in unwind_user_frame()
    572  if (!access_ok(tail, sizeof(buftail)))  in unwind_compat_user_frame()
    589  if (tail + 1 >= (struct compat_frame_tail __user *)  in unwind_compat_user_frame()
    606  struct frame_tail __user *tail;  in arch_stack_walk_user() local
    608  tail = (struct frame_tail __user *)regs->regs[29];  in arch_stack_walk_user()
    609  while (tail && !((unsigned long)tail & 0x7))  in arch_stack_walk_user()
    610  tail = unwind_user_frame(tail, cookie, consume_entry);  in arch_stack_walk_user()
    614  struct compat_frame_tail __user *tail;  in arch_stack_walk_user() local
    617  while (tail && !((unsigned long)tail & 0x3))  in arch_stack_walk_user()
    [all …]
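The arm and arm64 entries above are the same algorithm: check the frame pointer's alignment, copy the user-space {fp, lr} record with a fault-tolerant copy, record the return address, and insist the next frame live at a strictly higher address so a corrupted chain cannot loop. A minimal user-space model of that walk; the frame layout and addresses are invented, and read_frame() is a stand-in for __copy_from_user_inatomic():

    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>

    struct frame_tail {
        struct frame_tail *fp;   /* caller's frame pointer */
        uintptr_t lr;            /* return address */
    };

    /* stand-in for __copy_from_user_inatomic(); a real kernel would
     * fault-check the access instead of trusting the pointer */
    static int read_frame(struct frame_tail *dst, const struct frame_tail *src)
    {
        memcpy(dst, src, sizeof(*dst));
        return 0;
    }

    static void walk(struct frame_tail *tail)
    {
        while (tail && !((uintptr_t)tail & 0x3)) {  /* alignment guard, as on ARM */
            struct frame_tail buftail;

            if (read_frame(&buftail, tail))
                break;
            printf("pc: %#lx\n", (unsigned long)buftail.lr);

            /* frames must move strictly toward higher addresses;
             * a cycle back down the stack is cut off here */
            if (tail + 1 >= buftail.fp)
                break;
            tail = buftail.fp;
        }
    }

    int main(void)
    {
        /* fake stack: two well-formed frames, then a deliberate cycle
         * that the monotonicity check rejects */
        struct frame_tail frames[9] = { { 0 } };

        frames[0] = (struct frame_tail){ &frames[4], 0x1000 };
        frames[4] = (struct frame_tail){ &frames[8], 0x2000 };
        frames[8] = (struct frame_tail){ &frames[0], 0x3000 };

        walk(&frames[0]);
        return 0;
    }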
|
| /arch/x86/platform/geode/ |
| alix.c |
    73   const char *tail;  in alix_present() local
    91   tail = p + alix_sig_len;  in alix_present()
    92   if ((tail[0] == '2' || tail[0] == '3' || tail[0] == '6')) {  in alix_present()
|
| /arch/x86/kernel/ |
| static_call.c |
    114  static void __static_call_validate(u8 *insn, bool tail, bool tramp)  in __static_call_validate() argument
    123  if (tail) {  in __static_call_validate()
    142  static inline enum insn_type __sc_insn(bool null, bool tail)  in __sc_insn() argument
    154  return 2*tail + null;  in __sc_insn()
    157  void arch_static_call_transform(void *site, void *tramp, void *func, bool tail)  in arch_static_call_transform() argument
    167  __static_call_validate(site, tail, false);  in arch_static_call_transform()
    168  __static_call_transform(site, __sc_insn(!func, tail), func, false);  in arch_static_call_transform()
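__sc_insn() packs two booleans into a two-bit index, so the enum order has to be exactly CALL, NOP, JMP, RET for "return 2*tail + null" to work. A small sketch of that truth table; names mirror the snippet, and the surrounding text-patching machinery is omitted:

    #include <stdbool.h>
    #include <stdio.h>

    enum insn_type { CALL, NOP, JMP, RET };  /* order matters: value = 2*tail + null */

    static enum insn_type sc_insn(bool null, bool tail)
    {
        /* tail=0, null=0 -> CALL   normal site, real target: "call func"
         * tail=0, null=1 -> NOP    normal site, NULL target: patch a nop
         * tail=1, null=0 -> JMP    tail-call site: "jmp func"
         * tail=1, null=1 -> RET    tail-call site, NULL target: just return */
        return 2 * tail + null;
    }

    int main(void)
    {
        const char *name[] = { "CALL", "NOP", "JMP", "RET" };

        for (int tail = 0; tail <= 1; tail++)
            for (int null = 0; null <= 1; null++)
                printf("tail=%d null=%d -> %s\n", tail, null,
                       name[sc_insn(null, tail)]);
        return 0;
    }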
|
| /arch/powerpc/crypto/ |
| aes-spe-glue.c |
    324  int tail = req->cryptlen % AES_BLOCK_SIZE;  in ppc_xts_encrypt() local
    325  int offset = req->cryptlen - tail - AES_BLOCK_SIZE;  in ppc_xts_encrypt()
    333  if (tail) {  in ppc_xts_encrypt()
    336  req->cryptlen - tail, req->iv);  in ppc_xts_encrypt()
    341  if (err || !tail)  in ppc_xts_encrypt()
    345  memcpy(b[1], b[0], tail);  in ppc_xts_encrypt()
    362  int tail = req->cryptlen % AES_BLOCK_SIZE;  in ppc_xts_decrypt() local
    372  if (tail) {  in ppc_xts_decrypt()
    380  if (err || !tail)  in ppc_xts_decrypt()
    394  memcpy(b[0], b[2], tail);  in ppc_xts_decrypt()
    [all …]
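The XTS code above is doing ciphertext stealing: when cryptlen is not a multiple of AES_BLOCK_SIZE, the partial final block borrows bytes from the previous ciphertext block so output length equals input length. A sketch of just the stealing step, with an 8-byte toy block and an XOR stand-in for the cipher (none of this is real AES, and the tweak handling is omitted):

    #include <stdio.h>
    #include <string.h>
    #include <stddef.h>

    #define BS 8   /* toy block size; real XTS uses AES_BLOCK_SIZE (16) */

    static void blk_enc(unsigned char *b)   /* XOR stand-in -- NOT AES */
    {
        for (int i = 0; i < BS; i++)
            b[i] ^= 0x5a;
    }

    /* requires len >= BS, as XTS does: stealing needs one full block */
    static void cts_encrypt(unsigned char *buf, size_t len)
    {
        size_t tail = len % BS;
        size_t full = len - tail;
        unsigned char *last = buf + full - BS;  /* last full block's slot */
        unsigned char pp[BS];

        for (size_t off = 0; off < full; off += BS)
            blk_enc(buf + off);                 /* encrypt all full blocks */

        if (!tail)
            return;

        /* steal from the last ciphertext block: its first `tail` bytes
         * become the output tail, and the plaintext tail takes their
         * place before the block is encrypted a second time */
        memcpy(pp, last, BS);
        memcpy(pp, buf + full, tail);           /* plaintext tail over stolen head */
        memcpy(buf + full, last, tail);         /* emit the stolen bytes */
        blk_enc(pp);
        memcpy(last, pp, BS);
    }

    int main(void)
    {
        unsigned char msg[13] = "hello, tails";  /* 1 full block + 5-byte tail */

        cts_encrypt(msg, sizeof(msg));
        for (size_t i = 0; i < sizeof(msg); i++)
            printf("%02x", msg[i]);
        putchar('\n');
        return 0;
    }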
|
| /arch/powerpc/platforms/pseries/ |
| of_helpers.c |
    22   const char *tail;  in pseries_of_derive_parent() local
    25   tail = kbasename(path) - 1;  in pseries_of_derive_parent()
    31   if (tail > path) {  in pseries_of_derive_parent()
    32   parent_path = kstrndup(path, tail - path, GFP_KERNEL);  in pseries_of_derive_parent()
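pseries_of_derive_parent() points tail at the '/' just before the basename and duplicates everything in front of it as the parent's device-tree path. A user-space sketch of that derivation; my_basename() and strndup()/strdup() stand in for the kernel's kbasename() and kstrndup():

    #define _GNU_SOURCE
    #include <stdio.h>
    #include <string.h>
    #include <stdlib.h>

    static const char *my_basename(const char *path)
    {
        const char *s = strrchr(path, '/');
        return s ? s + 1 : path;
    }

    static char *derive_parent(const char *path)
    {
        const char *tail = my_basename(path) - 1;  /* points at the last '/' */

        if (tail > path)                 /* non-root parent: copy the prefix */
            return strndup(path, tail - path);
        return strdup("/");              /* direct child of the root node */
    }

    int main(void)
    {
        char *p = derive_parent("/cpus/PowerPC,POWER9@0");

        printf("%s\n", p);               /* -> /cpus */
        free(p);
        return 0;
    }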
|
| /arch/arm/crypto/ |
| ghash-ce-glue.c |
    314  int tail, err;  in gcm_encrypt() local
    357  tail = walk.nbytes % AES_BLOCK_SIZE;  in gcm_encrypt()
    367  if (unlikely(tail && (tail == walk.nbytes || src != dst)))  in gcm_encrypt()
    368  src = memcpy(buf + sizeof(buf) - tail, src, tail);  in gcm_encrypt()
    374  if (unlikely(tail && src != dst))  in gcm_encrypt()
    375  memcpy(dst, src, tail);  in gcm_encrypt()
    403  int tail, err, ret;  in gcm_decrypt() local
    451  if (unlikely(tail && (tail == walk.nbytes || src != dst)))  in gcm_decrypt()
    452  src = memcpy(buf + sizeof(buf) - tail, src, tail);  in gcm_decrypt()
    458  if (unlikely(tail && src != dst))  in gcm_decrypt()
    [all …]
|
| aes-ce-glue.c |
    399  u8 __aligned(8) tail[AES_BLOCK_SIZE];  in ctr_encrypt()
    410  ce_aes_ctr_encrypt(tail, NULL, ctx->key_enc, num_rounds(ctx),  in ctr_encrypt()
    413  crypto_xor_cpy(tdst, tsrc, tail, nbytes);  in ctr_encrypt()
    424  int tail = req->cryptlen % AES_BLOCK_SIZE;  in xts_encrypt() local
    435  if (unlikely(tail > 0 && walk.nbytes < walk.total)) {  in xts_encrypt()
    451  tail = 0;  in xts_encrypt()
    468  if (err || likely(!tail))  in xts_encrypt()
    496  int tail = req->cryptlen % AES_BLOCK_SIZE;  in xts_decrypt() local
    507  if (unlikely(tail > 0 && walk.nbytes < walk.total)) {  in xts_decrypt()
    523  tail = 0;  in xts_decrypt()
    [all …]
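The ctr_encrypt() hits show the standard CTR tail trick: a partial final block is handled by generating one extra keystream block into a local tail[] buffer and XORing only the bytes that remain, which is what crypto_xor_cpy(tdst, tsrc, tail, nbytes) does above. A toy model of the same flow; keystream_block() is a deterministic stand-in, NOT AES:

    #include <stdio.h>
    #include <string.h>
    #include <stddef.h>

    #define BS 16

    /* toy keystream generator -- NOT AES, just deterministic bytes */
    static void keystream_block(unsigned char *out, unsigned ctr)
    {
        for (int i = 0; i < BS; i++)
            out[i] = (unsigned char)(ctr * 151 + i * 31 + 7);
    }

    static void ctr_crypt(unsigned char *dst, const unsigned char *src,
                          size_t len, unsigned ctr)
    {
        while (len >= BS) {                   /* full blocks */
            unsigned char ks[BS];

            keystream_block(ks, ctr++);
            for (int i = 0; i < BS; i++)
                dst[i] = src[i] ^ ks[i];
            src += BS; dst += BS; len -= BS;
        }
        if (len) {                            /* partial final block */
            unsigned char tail[BS];

            keystream_block(tail, ctr);       /* one extra keystream block */
            for (size_t i = 0; i < len; i++)  /* xor only the bytes left */
                dst[i] = src[i] ^ tail[i];
        }
    }

    int main(void)
    {
        unsigned char msg[20] = "tail handling, ctr!";
        unsigned char enc[20], dec[20];

        ctr_crypt(enc, msg, sizeof(msg), 1);
        ctr_crypt(dec, enc, sizeof(msg), 1);  /* CTR is its own inverse */
        printf("round trip ok: %d\n", memcmp(msg, dec, sizeof(msg)) == 0);
        return 0;
    }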
|
| aes-neonbs-glue.c |
    259  int tail = req->cryptlen % AES_BLOCK_SIZE;  in __xts_crypt() local
    268  if (unlikely(tail)) {  in __xts_crypt()
    274  req->cryptlen - tail, req->iv);  in __xts_crypt()
    286  int reorder_last_tweak = !encrypt && tail > 0;  in __xts_crypt()
    302  if (err || likely(!tail))  in __xts_crypt()
    308  memcpy(buf + AES_BLOCK_SIZE, buf, tail);  in __xts_crypt()
    309  scatterwalk_map_and_copy(buf, req->src, req->cryptlen, tail, 0);  in __xts_crypt()
    321  AES_BLOCK_SIZE + tail, 1);  in __xts_crypt()
|
| /arch/s390/kernel/ |
| perf_cpum_sf.c |
    252   tail = sfb->tail;  in realloc_sampling_buffer()
    260   (unsigned long)tail);  in realloc_sampling_buffer()
    277   tail_prev = tail;  in realloc_sampling_buffer()
    278   tail = new;  in realloc_sampling_buffer()
    296   tail = tail_prev;  in realloc_sampling_buffer()
    301   tail++;  in realloc_sampling_buffer()
    307   sfb->tail = tail;  in realloc_sampling_buffer()
    340   sfb->tail = sfb->sdbt;  in alloc_sampling_buffer()
    1635  tail = sfb->tail = sfb->sdbt;  in aux_buffer_setup()
    1649  tail = new;  in aux_buffer_setup()
    [all …]
|
| /arch/alpha/lib/ |
| clear_user.S |
    44   beq $1, $tail  # .. e1 :
    58   $tail:
    59   bne $2, 1f  # e1 : is there a tail to do?
    74   and $1, 7, $2  # e1 : number of bytes in tail
|
| /arch/sparc/kernel/ |
| signal_32.c |
    230  void __user *tail;  in setup_frame() local
    251  tail = sf + 1;  in setup_frame()
    259  __siginfo_fpu_t __user *fp = tail;  in setup_frame()
    260  tail += sizeof(*fp);  in setup_frame()
    267  __siginfo_rwin_t __user *rwp = tail;  in setup_frame()
    268  tail += sizeof(*rwp);  in setup_frame()
    325  void __user *tail;  in setup_rt_frame() local
    343  tail = sf + 1;  in setup_rt_frame()
    355  __siginfo_fpu_t __user *fp = tail;  in setup_rt_frame()
    356  tail += sizeof(*fp);  in setup_rt_frame()
    [all …]
|
| signal32.c |
    357  void __user *tail;  in setup_frame32() local
    386  tail = (sf + 1);  in setup_frame32()
    411  __siginfo_fpu_t __user *fp = tail;  in setup_frame32()
    412  tail += sizeof(*fp);  in setup_frame32()
    419  __siginfo_rwin_t __user *rwp = tail;  in setup_frame32()
    420  tail += sizeof(*rwp);  in setup_frame32()
    492  void __user *tail;  in setup_rt_frame32() local
    520  tail = (sf + 1);  in setup_rt_frame32()
    545  __siginfo_fpu_t __user *fp = tail;  in setup_rt_frame32()
    546  tail += sizeof(*fp);  in setup_rt_frame32()
    [all …]
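In all four sparc frame builders above (signal_32.c and signal32.c), tail is a byte cursor carved out just past the fixed signal frame (tail = sf + 1), and each optional record -- FPU state, register windows -- claims sizeof(*record) bytes from it. A sketch of that carving with invented record types and sizes; the kernel relies on GNU void-pointer arithmetic, replaced here by explicit char casts:

    #include <stdio.h>
    #include <stdint.h>

    struct sigframe   { uint64_t regs[4]; };   /* fixed part (invented)     */
    struct fpu_state  { uint64_t fregs[2]; };  /* optional FPU record       */
    struct rwin_state { uint64_t wins[3]; };   /* optional reg-window record */

    int main(void)
    {
        _Alignas(8) unsigned char stack[256];  /* stand-in for the user stack */

        struct sigframe *sf = (struct sigframe *)stack;
        void *tail = sf + 1;                   /* first free byte after frame */

        struct fpu_state *fp = tail;           /* carve the FPU record */
        tail = (unsigned char *)tail + sizeof(*fp);

        struct rwin_state *rwp = tail;         /* carve the window record */
        tail = (unsigned char *)tail + sizeof(*rwp);

        printf("frame %zu bytes, fpu at +%td, rwin at +%td, total %td\n",
               sizeof(*sf),
               (unsigned char *)fp - stack,
               (unsigned char *)rwp - stack,
               (unsigned char *)tail - stack);
        return 0;
    }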
|
| /arch/arm64/crypto/ |
| aes-ce-ccm-glue.c |
    196  u32 tail = walk.nbytes % AES_BLOCK_SIZE;  in ccm_encrypt() local
    203  tail = 0;  in ccm_encrypt()
    211  ce_aes_ccm_encrypt(dst, src, walk.nbytes - tail,  in ccm_encrypt()
    219  err = skcipher_walk_done(&walk, tail);  in ccm_encrypt()
    263  u32 tail = walk.nbytes % AES_BLOCK_SIZE;  in ccm_decrypt() local
    270  tail = 0;  in ccm_decrypt()
    278  ce_aes_ccm_decrypt(dst, src, walk.nbytes - tail,  in ccm_decrypt()
    286  err = skcipher_walk_done(&walk, tail);  in ccm_decrypt()
|
| aes-neonbs-glue.c |
    283  int tail = req->cryptlen % (8 * AES_BLOCK_SIZE);  in __xts_crypt() local
    297  if (unlikely(tail > 0 && tail < AES_BLOCK_SIZE)) {  in __xts_crypt()
    310  tail = 0;  in __xts_crypt()
    353  if (err || likely(!tail))  in __xts_crypt()
    361  skcipher_request_set_crypt(req, src, dst, AES_BLOCK_SIZE + tail,  in __xts_crypt()
|
| /arch/um/drivers/ |
| line.c |
    43   n = line->head - line->tail;  in write_room()
    98   line->tail = line->buffer;  in buffer_data()
    107  memcpy(line->tail, buf, len);  in buffer_data()
    108  line->tail += len;  in buffer_data()
    112  memcpy(line->tail, buf, end);  in buffer_data()
    115  line->tail = line->buffer + len - end;  in buffer_data()
    137  if (line->tail < line->head) {  in flush_buffer()
    157  count = line->tail - line->head;  in flush_buffer()
    165  return line->head == line->tail;  in flush_buffer()
    194  if (line->head != line->tail)  in line_write()
    [all …]
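line.c keeps a classic ring buffer: head is the drain cursor, tail the fill cursor, and a write that would run past the end of the backing array is split into two memcpys, exactly as in buffer_data() above. A condensed model; field names follow the snippet, the buffer size is invented, and the kernel's locking and allocation are omitted:

    #include <stdio.h>
    #include <string.h>

    #define LINE_BUFSIZE 32

    struct line {
        char buffer[LINE_BUFSIZE];
        char *head;   /* next byte to drain */
        char *tail;   /* next byte to fill  */
    };

    static int write_room(struct line *line)
    {
        int n = line->head - line->tail;   /* free space between cursors */

        if (n <= 0)
            n += LINE_BUFSIZE;             /* cursors wrapped */
        return n - 1;                      /* one slot kept to tell full from empty */
    }

    static void buffer_data(struct line *line, const char *buf, int len)
    {
        int end = line->buffer + LINE_BUFSIZE - line->tail;  /* room before wrap */

        if (len < end) {
            memcpy(line->tail, buf, len);
            line->tail += len;
        } else {                           /* split the copy around the end */
            memcpy(line->tail, buf, end);
            memcpy(line->buffer, buf + end, len - end);
            line->tail = line->buffer + len - end;
        }
    }

    int main(void)
    {
        struct line l;

        l.head = l.tail = l.buffer + 28;        /* cursors parked near the end */
        printf("room: %d\n", write_room(&l));   /* 31: buffer is empty */
        buffer_data(&l, "wraps around", 12);    /* 4 bytes fit, 8 wrap to front */
        printf("tail offset: %td\n", l.tail - l.buffer);  /* -> 8 */
        return 0;
    }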
|
| /arch/sparc/boot/ |
| piggyback.c |
    184  int image, tail;  in main() local
    255  if ((tail = open(argv[4], O_RDONLY)) < 0)  in main()
    257  while ((i = read(tail, buffer, 1024)) > 0)  in main()
    262  if (close(tail) < 0)  in main()
|
| /arch/powerpc/lib/ |
| qspinlock.c |
    122  static __always_inline u32 trylock_clean_tail(struct qspinlock *lock, u32 tail)  in trylock_clean_tail() argument
    145  : "r" (&lock->val), "r"(tail), "r" (newval),  in trylock_clean_tail()
    161  static __always_inline u32 publish_tail_cpu(struct qspinlock *lock, u32 tail)  in publish_tail_cpu() argument
    175  : "r" (&lock->val), "r" (tail), "r"(_Q_TAIL_CPU_MASK)  in publish_tail_cpu()
    530  u32 val, old, tail;  in queued_spin_lock_mcs_queue() local
    561  tail = encode_tail_cpu(node->cpu);  in queued_spin_lock_mcs_queue()
    568  old = publish_tail_cpu(lock, tail);  in queued_spin_lock_mcs_queue()
    660  old = trylock_clean_tail(lock, tail);  in queued_spin_lock_mcs_queue()
    666  if ((old & _Q_TAIL_CPU_MASK) == tail)  in queued_spin_lock_mcs_queue()
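Here tail is the queue tail of an MCS-style lock: the queuer's CPU number is encoded into the high bits of the lock word and published with an atomic exchange of just those bits, and the unqueue path later checks (old & _Q_TAIL_CPU_MASK) == tail to learn whether it is still the last waiter. A simplified C11-atomics sketch; the bit layout is illustrative, and the real powerpc code uses larx/stcx. loops and also encodes a nesting index:

    #include <stdatomic.h>
    #include <stdint.h>
    #include <stdio.h>

    #define _Q_TAIL_CPU_OFFSET 16
    #define _Q_TAIL_CPU_MASK   (0xffffu << _Q_TAIL_CPU_OFFSET)

    static uint32_t encode_tail_cpu(int cpu)
    {
        return (uint32_t)(cpu + 1) << _Q_TAIL_CPU_OFFSET;  /* +1 so 0 = "no tail" */
    }

    /* swap in our tail, keep the low (locked/pending) bits, return old word */
    static uint32_t publish_tail_cpu(_Atomic uint32_t *lock, uint32_t tail)
    {
        uint32_t old = atomic_load(lock);

        while (!atomic_compare_exchange_weak(lock, &old,
                                             (old & ~_Q_TAIL_CPU_MASK) | tail))
            ;   /* retry on contention; `old` is refreshed by the CAS */
        return old;
    }

    int main(void)
    {
        _Atomic uint32_t lock = 1;              /* locked, no queue */
        uint32_t tail = encode_tail_cpu(3);
        uint32_t old = publish_tail_cpu(&lock, tail);

        printf("old=%#x new=%#x\n", (unsigned)old, (unsigned)atomic_load(&lock));
        /* later, (old & _Q_TAIL_CPU_MASK) == tail tells the waiter whether
         * anyone queued behind it */
        return 0;
    }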
|
| /arch/powerpc/kernel/ |
| static_call.c |
    7    void arch_static_call_transform(void *site, void *tramp, void *func, bool tail)  in arch_static_call_transform() argument
    18   if (site && tail) {  in arch_static_call_transform()
|
| /arch/arm64/kvm/hyp/nvhe/ |
| page_alloc.c |
    191  struct hyp_page *tail = p + i;  in hyp_split_page() local
    193  tail->order = 0;  in hyp_split_page()
    194  hyp_set_page_refcounted(tail);  in hyp_split_page()
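hyp_split_page() turns one order-n block into 2^n order-0 pages by clearing each tail page's order and giving it its own refcount. A sketch with struct hyp_page reduced to the two fields the snippet touches; the refcount assignment stands in for hyp_set_page_refcounted():

    #include <stdio.h>

    struct hyp_page { unsigned short order; unsigned short refcount; };

    static void split_page(struct hyp_page *p)
    {
        unsigned order = p->order;

        p->order = 0;                        /* head becomes an order-0 page too */
        for (unsigned i = 1; i < (1u << order); i++) {
            struct hyp_page *tail = p + i;   /* i-th tail page of the block */

            tail->order = 0;
            tail->refcount = 1;              /* stand-in for hyp_set_page_refcounted() */
        }
    }

    int main(void)
    {
        struct hyp_page pages[4] = { { .order = 2, .refcount = 1 } };

        split_page(&pages[0]);
        for (int i = 0; i < 4; i++)
            printf("page %d: order=%u ref=%u\n", i,
                   pages[i].order, pages[i].refcount);
        return 0;
    }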
|
| /arch/sh/mm/ |
| pmb.c |
    658  struct pmb_entry *tail;  in pmb_merge() local
    663  tail = head->link;  in pmb_merge()
    664  while (tail) {  in pmb_merge()
    665  span += tail->size;  in pmb_merge()
    673  if (!tail->link)  in pmb_merge()
    676  tail = tail->link;  in pmb_merge()
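pmb_merge() walks a chain of linked mappings with tail, accumulating the total span they cover (the full function then checks whether that span fits a single large PMB entry). A sketch of the accumulation; sizes and the limit are illustrative:

    #include <stdio.h>

    struct pmb_entry {
        unsigned long size;
        struct pmb_entry *link;   /* next contiguous entry, or NULL */
    };

    static unsigned long merge_span(struct pmb_entry *head, unsigned long limit)
    {
        unsigned long span = head->size;
        struct pmb_entry *tail = head->link;

        while (tail) {            /* tail walks the chain; head stays put */
            span += tail->size;
            if (span > limit)
                break;            /* too big for one mapping */
            if (!tail->link)
                break;            /* reached the end of the chain */
            tail = tail->link;
        }
        return span;
    }

    int main(void)
    {
        struct pmb_entry c = { 16, NULL }, b = { 16, &c }, a = { 32, &b };

        printf("span: %lu\n", merge_span(&a, 512));   /* -> 64 */
        return 0;
    }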
|
| /arch/arm64/lib/ |
| strlen.S |
    146  L(tail):
    177  bne L(tail)
    187  b L(tail)
|
| /arch/arm/tools/ |
| syscallnr.sh |
    11   grep -E "^[0-9A-Fa-fXx]+[[:space:]]+" "$in" | sort -n | tail -n1 | (
|
| /arch/riscv/crypto/ |
| aes-riscv64-glue.c |
    358  int tail = req->cryptlen % AES_BLOCK_SIZE;  in riscv64_aes_xts_crypt() local
    382  if (unlikely(tail > 0 && walk.nbytes < walk.total)) {  in riscv64_aes_xts_crypt()
    390  req->cryptlen - tail - AES_BLOCK_SIZE,  in riscv64_aes_xts_crypt()
    395  tail = 0;  in riscv64_aes_xts_crypt()
    417  if (err || likely(!tail))  in riscv64_aes_xts_crypt()
    426  skcipher_request_set_crypt(req, src, dst, AES_BLOCK_SIZE + tail,  in riscv64_aes_xts_crypt()
|
| /arch/arm/mach-omap1/ |
| ams-delta-fiq-handler.S |
    197  ldr r10, [r9, #BUF_TAIL_OFFSET]  @ get buffer tail offset
    203  add r12, r12, r10, LSL #2  @ calculate buffer tail address
    205  str r8, [r12]  @ append it to the buffer tail
    207  add r10, r10, #1  @ increment buffer tail offset
|