| /arch/parisc/lib/ |
| checksum.c |
    35  odd = 1 & (unsigned long) buff;  in do_csum()
    37  result = be16_to_cpu(*buff);  in do_csum()
    39  buff++;  in do_csum()
    43  if (2 & (unsigned long) buff) {  in do_csum()
    47  buff += 2;  in do_csum()
    53  r1 = *(unsigned int *)(buff + 0);  in do_csum()
    54  r2 = *(unsigned int *)(buff + 4);  in do_csum()
    62  buff += 16;  in do_csum()
    67  buff += 4;  in do_csum()
    74  buff += 2;  in do_csum()
    [all …]
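The parisc hits above (and the alpha ones further down) trace the classic do_csum() shape: peel off an odd leading byte, step up through halfword and word alignment, run an unrolled word loop, then fold the accumulator back down. A minimal, portable sketch of that shape follows; the function name is hypothetical, the alignment fix-ups and unrolled 32-bit loads are omitted, and this is not the kernel code itself.

```c
/*
 * Sketch only: accumulate big-endian 16-bit words into a wide register,
 * absorb a trailing odd byte, then fold the carries back into 16 bits.
 */
#include <stddef.h>
#include <stdint.h>

static uint16_t do_csum_sketch(const unsigned char *buff, size_t len)
{
	uint64_t sum = 0;

	while (len >= 2) {
		/* network byte order: first byte is the high octet */
		sum += (uint64_t)buff[0] << 8 | buff[1];
		buff += 2;
		len -= 2;
	}
	if (len)			/* odd trailing byte, zero-padded */
		sum += (uint64_t)buff[0] << 8;

	while (sum >> 16)		/* end-around carry fold */
		sum = (sum & 0xffff) + (sum >> 16);

	return (uint16_t)sum;
}
```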
|
| /arch/x86/lib/ |
| csum-partial_64.c |
    52  temp64 = update_csum_40b(temp64, buff);  in csum_partial()
    54  buff += 80;  in csum_partial()
    68  temp64 = update_csum_40b(temp64, buff);  in csum_partial()
    72  buff += 40;  in csum_partial()
    82  : [src] "r"(buff), "m"(*(const char(*)[32])buff));  in csum_partial()
    83  buff += 32;  in csum_partial()
    90  : [src] "r"(buff), "m"(*(const char(*)[16])buff));  in csum_partial()
    91  buff += 16;  in csum_partial()
    97  : [src] "r"(buff), "m"(*(const char(*)[8])buff));  in csum_partial()
    98  buff += 8;  in csum_partial()
    [all …]
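The x86-64 fragments accumulate into a single 64-bit register with add-with-carry chains over 40-, 32-, 16- and 8-byte blocks. A rough portable analogue of just the carry handling is below; it assumes a GCC/Clang compiler for __builtin_add_overflow and does not reproduce the block sizes or inline assembly of the real csum_partial().

```c
#include <stdint.h>

/* add a 64-bit word and wrap the carry back in, like "adc $0" */
static inline uint64_t add64_with_carry(uint64_t sum, uint64_t word)
{
	uint64_t res;
	int carry = __builtin_add_overflow(sum, word, &res);

	return res + carry;
}

/* fold a 64-bit one's-complement sum down to 32 bits */
static inline uint32_t fold64(uint64_t sum)
{
	sum = (sum & 0xffffffff) + (sum >> 32);
	sum = (sum & 0xffffffff) + (sum >> 32);
	return (uint32_t)sum;
}
```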
|
| /arch/alpha/lib/ |
| checksum.c |
    88  odd = 1 & (unsigned long) buff;  in do_csum()
    90  result = *buff << 8;  in do_csum()
    92  buff++;  in do_csum()
    96  if (2 & (unsigned long) buff) {  in do_csum()
    100  buff += 2;  in do_csum()
    104  if (4 & (unsigned long) buff) {  in do_csum()
    108  buff += 4;  in do_csum()
    116  buff += 8;  in do_csum()
    126  buff += 4;  in do_csum()
    131  buff += 2;  in do_csum()
    [all …]
|
| /arch/powerpc/kernel/ |
| optprobes.c |
    158  kprobe_opcode_t *buff;  in arch_prepare_optimized_kprobe() local
    168  buff = get_optinsn_slot();  in arch_prepare_optimized_kprobe()
    169  if (!buff)  in arch_prepare_optimized_kprobe()
    181  b_offset = (unsigned long)buff - (unsigned long)p->addr;  in arch_prepare_optimized_kprobe()
    186  b_offset = (unsigned long)(buff + TMPL_RET_IDX) - nip;  in arch_prepare_optimized_kprobe()
    193  pr_devel("Copying template to %p, size %lu\n", buff, size);  in arch_prepare_optimized_kprobe()
    204  patch_imm_load_insns((unsigned long)op, 3, buff + TMPL_OP_IDX);  in arch_prepare_optimized_kprobe()
    237  patch_branch(buff + TMPL_RET_IDX, nip, 0);  in arch_prepare_optimized_kprobe()
    239  flush_icache_range((unsigned long)buff, (unsigned long)(&buff[TMPL_END_IDX]));  in arch_prepare_optimized_kprobe()
    241  op->optinsn.insn = buff;  in arch_prepare_optimized_kprobe()
    [all …]
|
| nvram_64.c |
    181  char *buff, int length,  in nvram_write_os_partition() argument
    206  rc = ppc_md.nvram_write(buff, length, &tmp_index);  in nvram_write_os_partition()
    243  rc = ppc_md.nvram_read(buff, length, &tmp_index);  in nvram_read_partition()
    426  char *buff = NULL;  in nvram_pstore_read() local
    486  buff = kmalloc(part->size, GFP_KERNEL);  in nvram_pstore_read()
    488  if (!buff)  in nvram_pstore_read()
    492  kfree(buff);  in nvram_pstore_read()
    504  oops_hdr = (struct oops_log_info *)buff;  in nvram_pstore_read()
    517  record->buf = kmemdup(buff + hdr_size, length, GFP_KERNEL);  in nvram_pstore_read()
    518  kfree(buff);  in nvram_pstore_read()
    [all …]
|
| /arch/powerpc/include/asm/ |
| checksum.h |
    177  sum = csum_add(sum, (__force __wsum)*(const u16 *)buff);  in csum_partial()
    179  sum = csum_add(sum, (__force __wsum)*(const u32 *)buff);  in csum_partial()
    182  *(const u16 *)(buff + 4));  in csum_partial()
    185  *(const u32 *)(buff + 4));  in csum_partial()
    188  *(const u16 *)(buff + 8));  in csum_partial()
    191  *(const u32 *)(buff + 8));  in csum_partial()
    194  *(const u16 *)(buff + 12));  in csum_partial()
    197  *(const u32 *)(buff + 12));  in csum_partial()
    199  sum = csum_add(sum, ip_fast_csum_nofold(buff, len >> 2));  in csum_partial()
    201  sum = __csum_partial(buff, len, sum);  in csum_partial()
    [all …]
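The powerpc inline csum_partial() above special-cases small constant lengths into a few csum_add() calls over 16- and 32-bit loads before falling back to ip_fast_csum_nofold() or __csum_partial(). csum_add() itself is just a 32-bit add with the carry wrapped back in; a stand-alone sketch, with plain uint32_t in place of the kernel's __wsum typing:

```c
#include <stdint.h>

static inline uint32_t csum_add_sketch(uint32_t csum, uint32_t addend)
{
	uint32_t res = csum + addend;

	return res + (res < addend);	/* end-around carry */
}
```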
|
| nvram.h |
    45  extern int nvram_write_error_log(char * buff, int length,
    47  extern int nvram_read_error_log(char * buff, int length,
    84  extern int nvram_read_partition(struct nvram_os_partition *part, char *buff,
    90  char *buff, int length,
|
| /arch/riscv/lib/ |
| csum.c |
    136  offset = (unsigned long)buff & OFFSET_MASK;  in do_csum_with_alignment()
    137  kasan_check_read(buff, len);  in do_csum_with_alignment()
    138  ptr = (const unsigned long *)(buff - offset);  in do_csum_with_alignment()
    151  end = (const unsigned long *)(buff + len);  in do_csum_with_alignment()
    224  do_csum_no_alignment(const unsigned char *buff, int len)  in do_csum_no_alignment() argument
    229  ptr = (const unsigned long *)(buff);  in do_csum_no_alignment()
    232  kasan_check_read(buff, len);  in do_csum_no_alignment()
    234  end = (const unsigned long *)(buff + len);  in do_csum_no_alignment()
    291  unsigned int do_csum(const unsigned char *buff, int len)  in do_csum() argument
    307  return do_csum_no_alignment(buff, len);  in do_csum()
    [all …]
|
| /arch/s390/include/asm/ |
| checksum.h |
    19  static inline __wsum cksm(const void *buff, int len, __wsum sum)  in cksm() argument
    22  .even = (unsigned long)buff,  in cksm()
    26  instrument_read(buff, len);  in cksm()
    27  kmsan_check_memory(buff, len);  in cksm()
    35  __wsum csum_partial(const void *buff, int len, __wsum sum);
    102  static inline __sum16 ip_compute_csum(const void *buff, int len)  in ip_compute_csum() argument
    104  return csum_fold(csum_partial(buff, len, 0));  in ip_compute_csum()
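The ip_compute_csum() definitions repeated across these headers (s390 here, and m68k, sparc, arm, sh, x86, xtensa, mips below) are all the same composition: fold and complement the result of csum_partial(buff, len, 0). A rough user-space rendering of the fold step, with plain uint32_t/uint16_t standing in for __wsum/__sum16:

```c
#include <stdint.h>

/*
 * csum_fold(): collapse a 32-bit one's-complement partial sum to 16 bits
 * and complement it.  ip_compute_csum(buff, len) is then just
 * csum_fold(csum_partial(buff, len, 0)), exactly as the one-liners in
 * these headers read.
 */
static inline uint16_t csum_fold_sketch(uint32_t csum)
{
	csum = (csum & 0xffff) + (csum >> 16);	/* fold, may carry once */
	csum = (csum & 0xffff) + (csum >> 16);	/* absorb that carry */
	return (uint16_t)~csum;			/* one's complement */
}
```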
|
| /arch/m68k/lib/ |
| checksum.c |
    38  __wsum csum_partial(const void *buff, int len, __wsum sum)  in csum_partial() argument
    117  : "=d" (sum), "=d" (len), "=a" (buff),  in csum_partial()
    119  : "0" (sum), "1" (len), "2" (buff)  in csum_partial()
|
| /arch/loongarch/lib/ |
| csum.c |
    23  unsigned int __no_sanitize_address do_csum(const unsigned char *buff, int len)  in do_csum() argument
    32  offset = (unsigned long)buff & 7;  in do_csum()
    41  kasan_check_read(buff, len);  in do_csum()
    42  ptr = (u64 *)(buff - offset);  in do_csum()
|
| /arch/m68k/include/asm/ |
| checksum.h |
    23  __wsum csum_partial(const void *buff, int len, __wsum sum);
    113  static inline __sum16 ip_compute_csum(const void *buff, int len)  in ip_compute_csum() argument
    115  return csum_fold (csum_partial(buff, len, 0));  in ip_compute_csum()
|
| /arch/x86/um/asm/ |
| checksum_32.h |
    8  static inline __sum16 ip_compute_csum(const void *buff, int len)  in ip_compute_csum() argument
    10  return csum_fold (csum_partial(buff, len, 0));  in ip_compute_csum()
|
| /arch/sparc/include/asm/ |
| checksum_64.h |
    33  __wsum csum_partial(const void * buff, int len, __wsum sum);
    127  static inline __sum16 ip_compute_csum(const void *buff, int len)  in ip_compute_csum() argument
    129  return csum_fold(csum_partial(buff, len, 0));  in ip_compute_csum()
|
| checksum_32.h |
    33  __wsum csum_partial(const void *buff, int len, __wsum sum);
    193  static inline __sum16 ip_compute_csum(const void *buff, int len)  in ip_compute_csum() argument
    195  return csum_fold(csum_partial(buff, len, 0));  in ip_compute_csum()
|
| /arch/arm64/lib/ |
| csum.c |
    21  unsigned int __no_sanitize_address do_csum(const unsigned char *buff, int len)  in do_csum() argument
    30  offset = (unsigned long)buff & 7;  in do_csum()
    39  kasan_check_read(buff, len);  in do_csum()
    40  ptr = (u64 *)(buff - offset);  in do_csum()
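The arm64 hit above, like the loongarch and riscv ones earlier in this listing, rounds buff down to an 8-byte boundary and then works word-at-a-time. That is also why these routines are marked __no_sanitize_address and call kasan_check_read() by hand: the first load legitimately touches bytes before buff. A little-endian-only sketch of that first masked load; purely illustrative, not the kernel code, and the real versions also handle big-endian and mask the tail word.

```c
#include <stdint.h>

static uint64_t first_word_masked(const unsigned char *buff)
{
	uintptr_t offset = (uintptr_t)buff & 7;
	/* round down to an 8-byte boundary; reads a few bytes before buff,
	 * which user-space sanitizers would flag */
	const uint64_t *ptr = (const uint64_t *)(buff - offset);
	uint64_t word = *ptr;

	/* little-endian: the bytes before buff are the low-order bytes */
	return word & (~0ULL << (8 * offset));
}
```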
|
| /arch/arm/include/asm/ |
| checksum.h |
    28  __wsum csum_partial(const void *buff, int len, __wsum sum);
    149  ip_compute_csum(const void *buff, int len)  in ip_compute_csum() argument
    151  return csum_fold(csum_partial(buff, len, 0));  in ip_compute_csum()
|
| /arch/sh/include/asm/ |
| checksum_32.h |
    24  asmlinkage __wsum csum_partial(const void *buff, int len, __wsum sum);
    152  static inline __sum16 ip_compute_csum(const void *buff, int len)  in ip_compute_csum() argument
    154  return csum_fold(csum_partial(buff, len, 0));  in ip_compute_csum()
|
| /arch/x86/include/asm/ |
| checksum_32.h |
    20  asmlinkage __wsum csum_partial(const void *buff, int len, __wsum sum);
    139  static inline __sum16 ip_compute_csum(const void *buff, int len)  in ip_compute_csum() argument
    141  return csum_fold(csum_partial(buff, len, 0));  in ip_compute_csum()
|
| /arch/powerpc/platforms/pseries/ |
| nvram.c |
    133  int nvram_write_error_log(char * buff, int length,  in nvram_write_error_log() argument
    136  int rc = nvram_write_os_partition(&rtas_log_partition, buff, length,  in nvram_write_error_log()
    152  int nvram_read_error_log(char *buff, int length,  in nvram_read_error_log() argument
    155  return nvram_read_partition(&rtas_log_partition, buff, length,  in nvram_read_error_log()
|
| /arch/xtensa/include/asm/ |
| checksum.h |
    30  asmlinkage __wsum csum_partial(const void *buff, int len, __wsum sum);
    168  static __inline__ __sum16 ip_compute_csum(const void *buff, int len)  in ip_compute_csum() argument
    170  return csum_fold (csum_partial(buff, len, 0));  in ip_compute_csum()
|
| /arch/nios2/include/asm/ |
| checksum.h |
    14  extern __wsum csum_partial(const void *buff, int len, __wsum sum);
    16  extern __sum16 ip_compute_csum(const void *buff, int len);
|
| /arch/alpha/include/asm/ |
| checksum.h |
    35  extern __wsum csum_partial(const void *buff, int len, __wsum sum);
    56  extern __sum16 ip_compute_csum(const void *buff, int len);
|
| /arch/s390/lib/ |
| csum-partial.c |
    81  __wsum csum_partial(const void *buff, int len, __wsum sum)  in csum_partial() argument
    83  return csum_copy(NULL, buff, len, sum, false);  in csum_partial()
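The s390 fragment implements csum_partial() by delegating to a combined checksum-and-copy helper with a NULL destination (and a false copy flag). A rough sketch of that dual-purpose shape is below; it uses a simple byte-pair accumulation rather than the cksm instruction wrapper seen in the s390 header earlier, and the function name and exact parameters are illustrative.

```c
#include <stddef.h>
#include <stdint.h>
#include <string.h>

/* checksum src; if dst is non-NULL, also copy the data there */
static uint32_t csum_copy_sketch(void *dst, const void *src, size_t len,
				 uint32_t sum)
{
	const unsigned char *p = src;
	uint64_t acc = sum;
	size_t i;

	if (dst)				/* NULL dst means checksum only */
		memcpy(dst, src, len);

	for (i = 0; i + 1 < len; i += 2)	/* 16-bit big-endian words */
		acc += (uint64_t)p[i] << 8 | p[i + 1];
	if (len & 1)				/* trailing odd byte */
		acc += (uint64_t)p[len - 1] << 8;

	while (acc >> 32)			/* fold back to 32 bits */
		acc = (acc & 0xffffffff) + (acc >> 32);

	return (uint32_t)acc;
}
```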
|
| /arch/mips/include/asm/ |
| checksum.h |
    35  __wsum csum_partial(const void *buff, int len, __wsum sum);
    177  static inline __sum16 ip_compute_csum(const void *buff, int len)  in ip_compute_csum() argument
    179  return csum_fold(csum_partial(buff, len, 0));  in ip_compute_csum()
|