/kernel/dma/

coherent.c
  79  if (!mem)   in _dma_release_coherent_memory()
  84  kfree(mem);   in _dma_release_coherent_memory()
  124  if (IS_ERR(mem))   in dma_declare_coherent_memory()
  155  pageno = bitmap_find_free_region(mem->bitmap, mem->size, order);   in __dma_alloc_from_coherent()
  192  if (!mem)   in dma_alloc_from_dev_coherent()
  202  if (mem && vaddr >= mem->virt_base && vaddr <   in __dma_release_from_coherent()
  203  (mem->virt_base + ((dma_addr_t)mem->size << PAGE_SHIFT))) {   in __dma_release_from_coherent()
  237  if (mem && vaddr >= mem->virt_base && vaddr + size <=   in __dma_mmap_from_coherent()
  238  (mem->virt_base + ((dma_addr_t)mem->size << PAGE_SHIFT))) {   in __dma_mmap_from_coherent()
  316  if (IS_ERR(mem))   in dma_init_global_coherent()
  [all …]
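The __dma_release_from_coherent() and __dma_mmap_from_coherent() hits are one recurring pattern: test whether a candidate virtual address falls inside the region [virt_base, virt_base + (size << PAGE_SHIFT)), where the size field counts pages (the release variant tests a single address, the mmap variant an address plus length). Below is a minimal standalone sketch of that containment test; the names (dma_mem_region, addr_in_region) and the 4 KiB PAGE_SHIFT are illustrative assumptions, not the kernel's definitions.

    #include <stdbool.h>
    #include <stddef.h>
    #include <stdint.h>

    #define PAGE_SHIFT 12                  /* assumption: 4 KiB pages */

    /* Hypothetical stand-in for the per-device coherent-region bookkeeping. */
    struct dma_mem_region {
        void     *virt_base;               /* kernel virtual base of the region */
        uint64_t  nr_pages;                /* region size in pages ("mem->size" above) */
    };

    /*
     * Does [vaddr, vaddr + size) lie entirely inside the region?  The size
     * field counts pages, so the byte length is nr_pages << PAGE_SHIFT,
     * matching the shape of the checks listed above.
     */
    static bool addr_in_region(const struct dma_mem_region *mem,
                               const void *vaddr, size_t size)
    {
        const char *base, *end;
        const char *p = vaddr;

        if (!mem)
            return false;

        base = mem->virt_base;
        end  = base + (mem->nr_pages << PAGE_SHIFT);

        return p >= base && p + size <= end;
    }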
|
swiotlb.c
  263  if (!mem->nslabs || mem->late_alloc)   in swiotlb_update_mem_attributes()
  277  mem->end = mem->start + bytes;   in swiotlb_init_io_tlb_pool()
  280  mem->area_nslabs = nslabs / mem->nareas;   in swiotlb_init_io_tlb_pool()
  540  tbl_size = PAGE_ALIGN(mem->end - mem->start);   in swiotlb_exit()
  552  array_size(sizeof(*mem->areas), mem->nareas));   in swiotlb_exit()
  557  memset(mem, 0, sizeof(*mem));   in swiotlb_exit()
  1382  if (!mem || !mem->nslabs) {   in swiotlb_tbl_map_single()
  1408  size, mem->nslabs, mem_used(mem));   in swiotlb_tbl_map_single()
  1633  return mem && mem->nslabs;   in is_swiotlb_active()
  1759  if (!mem)   in swiotlb_alloc()
  [all …]
|
/kernel/

crash_core.c
  189  nr_phdr += mem->nr_ranges;   in crash_prepare_elf64_headers()
  285  end = mem->ranges[i].end;   in crash_exclude_mem_range()
  310  memmove(&mem->ranges[i], &mem->ranges[i + 1],   in crash_exclude_mem_range()
  311  (mem->nr_ranges - (i + 1)) * sizeof(mem->ranges[i]));   in crash_exclude_mem_range()
  313  mem->nr_ranges--;   in crash_exclude_mem_range()
  316  if (mem->nr_ranges >= mem->max_nr_ranges)   in crash_exclude_mem_range()
  319  memmove(&mem->ranges[i + 2], &mem->ranges[i + 1],   in crash_exclude_mem_range()
  320  (mem->nr_ranges - (i + 1)) * sizeof(mem->ranges[i]));   in crash_exclude_mem_range()
  327  mem->nr_ranges++;   in crash_exclude_mem_range()
  594  unsigned long mem;   in crash_handle_hotplug_event()  (local)
  [all …]
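The crash_exclude_mem_range() hits are the two memmove() idioms used on the sorted, dense array of memory ranges: shift left and drop a slot when an excluded region swallows an entry whole, or, after checking max_nr_ranges, shift right to open a slot when the exclusion splits an entry in two. A hedged standalone sketch of those two operations, using a hypothetical range_list type rather than the kernel's struct crash_mem:

    #include <string.h>

    /* Hypothetical range bookkeeping; ranges are inclusive [start, end]. */
    struct range { unsigned long long start, end; };

    struct range_list {
        unsigned int nr_ranges;            /* entries currently in use */
        unsigned int max_nr_ranges;        /* capacity of ranges[]     */
        struct range ranges[16];
    };

    /* Drop entry i: shift everything after it one slot to the left. */
    static void range_delete(struct range_list *mem, unsigned int i)
    {
        memmove(&mem->ranges[i], &mem->ranges[i + 1],
                (mem->nr_ranges - (i + 1)) * sizeof(mem->ranges[i]));
        mem->nr_ranges--;
    }

    /* Carve [hole_start, hole_end] out of entry i, splitting it in two. */
    static int range_split(struct range_list *mem, unsigned int i,
                           unsigned long long hole_start,
                           unsigned long long hole_end)
    {
        if (mem->nr_ranges >= mem->max_nr_ranges)
            return -1;                     /* no room for the extra entry */

        /* Open a slot at i + 1 by shifting the tail one slot right. */
        memmove(&mem->ranges[i + 2], &mem->ranges[i + 1],
                (mem->nr_ranges - (i + 1)) * sizeof(mem->ranges[i]));

        mem->ranges[i + 1].start = hole_end + 1;
        mem->ranges[i + 1].end   = mem->ranges[i].end;
        mem->ranges[i].end       = hole_start - 1;
        mem->nr_ranges++;
        return 0;
    }

Keeping the array sorted and dense this way is what lets later passes, such as the ELF header sizing at line 189 (nr_phdr += mem->nr_ranges), remain a simple count over nr_ranges.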
|
kexec_file.c
  510  kbuf->mem = temp_start;   in locate_mem_hole_top_down()
  552  kbuf->mem = temp_start;   in locate_mem_hole_bottom_up()
  672  unsigned long mem;   in kexec_alloc_contig()  (local)
  689  mem = page_to_boot_pfn(p) << PAGE_SHIFT;   in kexec_alloc_contig()
  691  if (kimage_is_destination_range(kbuf->image, mem, mem + kbuf->memsz)) {   in kexec_alloc_contig()
  719  if (kbuf->mem != KEXEC_BUF_MEM_UNKNOWN)   in kexec_locate_mem_hole()
  793  ksegment->mem = kbuf->mem;   in kexec_add_buffer()
  864  sha_regions[j].start = ksegment->mem;   in kexec_calculate_store_digests()
  976  bss_addr = kbuf->mem + kbuf->bufsz;   in kexec_purgatory_setup_sechdrs()
  1012  kbuf->image->start += kbuf->mem + offset;   in kexec_purgatory_setup_sechdrs()
  [all …]
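Several of these hits revolve around one convention: a caller leaves kbuf->mem set to KEXEC_BUF_MEM_UNKNOWN when it wants a free hole located for it, the locate helpers write the chosen address back into kbuf->mem, and a preset address skips the search entirely (line 719). The sketch below illustrates that sentinel-driven flow with invented names (buf_req, buf_find_hole, BUF_MEM_UNKNOWN) and a toy top-down search over a fixed table; it is not the kexec_file implementation.

    #include <stdio.h>

    #define BUF_MEM_UNKNOWN (~0UL)         /* stand-in for KEXEC_BUF_MEM_UNKNOWN */

    /* Hypothetical placement request: memsz bytes, aligned, unless mem is preset. */
    struct buf_req {
        unsigned long mem;                 /* destination, or BUF_MEM_UNKNOWN */
        unsigned long memsz;
        unsigned long align;               /* power of two */
    };

    /* One free physical range [start, end); illustrative only. */
    struct free_range { unsigned long start, end; };

    static int locate_hole_top_down(struct buf_req *buf,
                                    const struct free_range *r, int nr)
    {
        for (int i = nr - 1; i >= 0; i--) {
            unsigned long addr;

            if (r[i].end - r[i].start < buf->memsz)
                continue;
            /* Highest aligned address in r[i] that still fits memsz bytes. */
            addr = (r[i].end - buf->memsz) & ~(buf->align - 1);
            if (addr >= r[i].start) {
                buf->mem = addr;           /* record the chosen hole */
                return 0;
            }
        }
        return -1;
    }

    static int buf_find_hole(struct buf_req *buf,
                             const struct free_range *r, int nr)
    {
        if (buf->mem != BUF_MEM_UNKNOWN)   /* caller already picked an address */
            return 0;
        return locate_hole_top_down(buf, r, nr);
    }

    int main(void)
    {
        struct free_range ranges[] = { { 0x100000, 0x400000 },
                                       { 0x800000, 0xa00000 } };
        struct buf_req req = { .mem = BUF_MEM_UNKNOWN,
                               .memsz = 0x2000, .align = 0x1000 };

        if (!buf_find_hole(&req, ranges, 2))
            printf("placed at 0x%lx\n", req.mem);   /* 0x9fe000 */
        return 0;
    }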
|
kexec_core.c
  132  mstart = image->segment[i].mem;   in sanity_check_segment_list()
  151  mstart = image->segment[i].mem;   in sanity_check_segment_list()
  156  pstart = image->segment[j].mem;   in sanity_check_segment_list()
  204  mstart = image->segment[i].mem;   in sanity_check_segment_list()
  222  accept_memory(image->segment[i].mem, image->segment[i].memsz);   in sanity_check_segment_list()
  269  mstart = image->segment[i].mem;   in kimage_is_destination_range()
  438  mstart = image->segment[i].mem;   in kimage_alloc_crash_control_pages()
  758  maddr = segment->mem;   in kimage_load_cma_segment()
  816  maddr = segment->mem;   in kimage_load_normal_segment()
  895  maddr = segment->mem;   in kimage_load_crash_segment()
|
kexec_handover.c
  334  const phys_addr_t *mem;   in kho_mem_deserialize()  (local)
  337  mem = fdt_getprop(fdt, 0, PROP_PRESERVED_MEMORY_MAP, &len);   in kho_mem_deserialize()
  339  if (!mem || len != sizeof(*mem)) {   in kho_mem_deserialize()
  344  chunk = *mem ? phys_to_virt(*mem) : NULL;   in kho_mem_deserialize()
  1225  .mem = KEXEC_BUF_MEM_UNKNOWN,   in kho_fill_kimage()
|
kexec.c
  299  ksegments[i].mem = in.mem;   in COMPAT_SYSCALL_DEFINE4()
|
relay.c
  110  void *mem;   in relay_alloc_buf()  (local)
  126  mem = vmap(buf->page_array, n_pages, VM_MAP, PAGE_KERNEL);   in relay_alloc_buf()
  127  if (!mem)   in relay_alloc_buf()
  131  return mem;   in relay_alloc_buf()
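relay_alloc_buf() shows the stock idiom for building one contiguous kernel mapping out of individually allocated pages: fill a struct page * array, then hand it to vmap(). A trimmed-down sketch of that idiom (kernel context; the helper name is invented and error handling is reduced to the essentials):

    #include <linux/mm.h>
    #include <linux/slab.h>
    #include <linux/vmalloc.h>

    /*
     * Allocate n_pages individual pages and map them as one virtually
     * contiguous range.  Returns the mapping and hands the page array back
     * through *pagesp (needed later for vunmap() plus per-page freeing),
     * or NULL with everything released on failure.
     */
    static void *alloc_vmapped_buffer(unsigned int n_pages, struct page ***pagesp)
    {
        struct page **pages;
        void *mem = NULL;
        unsigned int i;

        pages = kcalloc(n_pages, sizeof(*pages), GFP_KERNEL);
        if (!pages)
            return NULL;

        for (i = 0; i < n_pages; i++) {
            pages[i] = alloc_page(GFP_KERNEL);
            if (!pages[i])
                goto depopulate;
        }

        /* One contiguous kernel-virtual view over the scattered pages. */
        mem = vmap(pages, n_pages, VM_MAP, PAGE_KERNEL);
        if (mem) {
            *pagesp = pages;
            return mem;
        }

    depopulate:
        while (i--)
            __free_page(pages[i]);
        kfree(pages);
        return NULL;
    }

relay keeps that page array in buf->page_array (line 126) precisely so the mapping can later be undone with vunmap() and the pages freed one by one.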
|
crash_dump_dm_crypt.c
  423  kbuf.mem = KEXEC_BUF_MEM_UNKNOWN;   in crash_load_dm_crypt_keys()
  429  image->dm_crypt_keys_addr = kbuf.mem;   in crash_load_dm_crypt_keys()
|
kexec_elf.c
  416  kbuf->mem = KEXEC_BUF_MEM_UNKNOWN;   in kexec_elf_load()
  420  load_addr = kbuf->mem;   in kexec_elf_load()
|
/kernel/module/

kdb.c
  29  kdb_printf("%-20s%8u", mod->name, mod->mem[MOD_TEXT].size);   in kdb_lsmod()
  30  kdb_printf("/%8u", mod->mem[MOD_RODATA].size);   in kdb_lsmod()
  31  kdb_printf("/%8u", mod->mem[MOD_RO_AFTER_INIT].size);   in kdb_lsmod()
  32  kdb_printf("/%8u", mod->mem[MOD_DATA].size);   in kdb_lsmod()
  44  kdb_printf(" 0x%px", mod->mem[MOD_TEXT].base);   in kdb_lsmod()
  45  kdb_printf("/0x%px", mod->mem[MOD_RODATA].base);   in kdb_lsmod()
  46  kdb_printf("/0x%px", mod->mem[MOD_RO_AFTER_INIT].base);   in kdb_lsmod()
  47  kdb_printf("/0x%px", mod->mem[MOD_DATA].base);   in kdb_lsmod()
|
tree_lookup.c
  81  mod->mem[type].mtn.mod = mod;   in mod_tree_insert()
  82  if (mod->mem[type].size)   in mod_tree_insert()
  83  __mod_tree_insert(&mod->mem[type].mtn, &mod_tree);   in mod_tree_insert()
  90  if (mod->mem[type].size)   in mod_tree_remove_init()
  91  __mod_tree_remove(&mod->mem[type].mtn, &mod_tree);   in mod_tree_remove_init()
  98  if (mod->mem[type].size)   in mod_tree_remove()
  99  __mod_tree_remove(&mod->mem[type].mtn, &mod_tree);   in mod_tree_remove()
|
strict_rwx.c
  18  const struct module_memory *mod_mem = &mod->mem[type];   in module_set_memory()
  36  const struct module_memory *mem = &mod->mem[type];   in module_enable_text_rox()  (local)
  39  if (mem->is_rox)   in module_enable_text_rox()
  40  ret = execmem_restore_rox(mem->base, mem->size);   in module_enable_text_rox()
|
main.c
  1318  mod->mem[type].size = size;   in module_memory_alloc()
  1342  if (!mod->mem[type].is_rox)   in module_memory_alloc()
  1346  mod->mem[type].base = ptr;   in module_memory_alloc()
  1354  struct module_memory *mem = &mod->mem[type];   in module_memory_restore_rox()  (local)
  1356  if (mem->is_rox)   in module_memory_restore_rox()
  1357  execmem_restore_rox(mem->base, mem->size);   in module_memory_restore_rox()
  1363  struct module_memory *mem = &mod->mem[type];   in module_memory_free()  (local)
  1365  execmem_free(mem->base);   in module_memory_free()
  1383  lockdep_free_key_range(mod->mem[MOD_DATA].base, mod->mem[MOD_DATA].size);   in free_mod_mem()
  3071  mod->mem[MOD_INIT_TEXT].base + mod->mem[MOD_INIT_TEXT].size);   in do_init_module()
  [all …]
|
debug_kmemleak.c
  18  !mod->mem[type].is_rox)   in kmemleak_load_module()
  19  kmemleak_no_scan(mod->mem[type].base);   in kmemleak_load_module()
|
kallsyms.c
  119  struct module_memory *mod_mem_data = &mod->mem[MOD_DATA];   in layout_symtab()
  120  struct module_memory *mod_mem_init_data = &mod->mem[MOD_INIT_DATA];   in layout_symtab()
  178  void *data_base = mod->mem[MOD_DATA].base;   in add_kallsyms()
  179  void *init_data_base = mod->mem[MOD_INIT_DATA].base;   in add_kallsyms()
  265  mod_mem = &mod->mem[MOD_INIT_TEXT];   in find_kallsyms_symbol()
  267  mod_mem = &mod->mem[MOD_TEXT];   in find_kallsyms_symbol()
|
procfs.c
  70  size += mod->mem[type].size;   in module_total_size()
  95  value = m->private ? NULL : mod->mem[MOD_TEXT].base;   in m_show()
|
/kernel/trace/

trace_seq.c
  257  void trace_seq_putmem(struct trace_seq *s, const void *mem, unsigned int len)   in trace_seq_putmem()  (argument)
  269  seq_buf_putmem(&s->seq, mem, len);   in trace_seq_putmem()
  283  void trace_seq_putmem_hex(struct trace_seq *s, const void *mem,   in trace_seq_putmem_hex()  (argument)
  300  seq_buf_putmem_hex(&s->seq, mem, len);   in trace_seq_putmem_hex()
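trace_seq_putmem() and trace_seq_putmem_hex() append raw bytes, respectively a hex rendering of them, to a trace_seq; like other trace_seq writes they are bounded by the sequence buffer, so an oversized write marks the sequence full rather than overrunning it. A small hypothetical usage sketch:

    #include <linux/trace_seq.h>

    /* Dump a blob into an already-initialized trace_seq, raw and as hex. */
    static void dump_blob(struct trace_seq *s, const void *blob, unsigned int len)
    {
        trace_seq_puts(s, "raw: ");
        trace_seq_putmem(s, blob, len);

        trace_seq_puts(s, " hex: ");
        trace_seq_putmem_hex(s, blob, len);
    }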
|
trace.c
  6127  entry->mod_addr = (unsigned long)mod->mem[MOD_TEXT].base;   in save_mod()
  9541  module_delta->delta[i] = (unsigned long)mod->mem[MOD_TEXT].base   in make_mod_delta()
|
/kernel/debug/

gdbstub.c
  236  char *kgdb_mem2hex(char *mem, char *buf, int count)   in kgdb_mem2hex()  (argument)
  247  err = copy_from_kernel_nofault(tmp, mem, count);   in kgdb_mem2hex()
  265  int kgdb_hex2mem(char *buf, char *mem, int count)   in kgdb_hex2mem()  (argument)
  283  return copy_to_kernel_nofault(mem, tmp_raw, count);   in kgdb_hex2mem()
  323  static int kgdb_ebin2mem(char *buf, char *mem, int count)   in kgdb_ebin2mem()  (argument)
  335  return copy_to_kernel_nofault(mem, c, size);   in kgdb_ebin2mem()
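kgdb_mem2hex() and kgdb_hex2mem() translate between raw kernel memory and the ASCII-hex wire format of the GDB remote protocol, going through copy_from_kernel_nofault()/copy_to_kernel_nofault() so that a bad address faults gracefully instead of taking down the stopped kernel. A sketch of the memory-to-hex direction, with an illustrative bounce-buffer size and naming rather than gdbstub's:

    #include <linux/uaccess.h>             /* copy_from_kernel_nofault() */

    /*
     * Read up to 64 bytes of kernel memory through a bounce buffer and
     * expand each byte into two ASCII hex digits.  Returns a pointer to
     * the terminating NUL in buf, or NULL if the copy faulted.
     */
    static char *sketch_mem2hex(const void *mem, char *buf, int count)
    {
        static const char hexdigits[] = "0123456789abcdef";
        unsigned char tmp[64];
        int i;

        if (count < 0 || count > (int)sizeof(tmp))
            return NULL;

        /* Must not oops if mem points at something unmapped. */
        if (copy_from_kernel_nofault(tmp, mem, count))
            return NULL;

        for (i = 0; i < count; i++) {
            *buf++ = hexdigits[tmp[i] >> 4];
            *buf++ = hexdigits[tmp[i] & 0x0f];
        }
        *buf = '\0';

        return buf;
    }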
|
/kernel/bpf/

cpumap.c
  198  rxq.mem.type = xdpf->mem_type;   in cpu_map_bpf_prog_run_xdp()
|
/kernel/power/

Kconfig
  240  Enable this with a kernel parameter like "test_suspend=mem".
|