/linux/fs/ntfs3/
run.c
    49  if (vcn < r->vcn) {    in run_lookup()
    65  if (vcn >= r->vcn) {    in run_lookup()
    74  if (vcn < r->vcn) {    in run_lookup()
   211  gap = vcn - r->vcn;    in run_lookup_entry()
   236  if (vcn > r->vcn) {    in run_truncate_head()
   239  r->vcn = vcn;    in run_truncate_head()
   414  r->vcn = vcn;    in run_add_entry()
   502  if (vcn > r->vcn) {    in run_collapse_range()
   566  if (vcn > r->vcn)    in run_insert_range()
   609  *vcn = r->vcn;    in run_get_entry()
  [all …]
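
In fs/ntfs3, vcn is a Virtual Cluster Number: run.c maintains the run list that maps an attribute's virtual clusters onto logical (on-disk) clusters, and the run_lookup() comparisons above are consistent with a bounded search over that sorted list. Below is a minimal self-contained sketch of such a lookup over a sorted array of extents; the struct layout and function name are hypothetical, only the vcn/lcn/len naming mirrors the snippets, so treat it as an illustration of the idea rather than the kernel's run_lookup().

#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>

typedef uint64_t CLST;          /* cluster number, as in fs/ntfs3 */

struct run_sketch {            /* one extent of a hypothetical run list */
        CLST vcn;              /* first virtual cluster covered */
        CLST lcn;              /* first logical (on-disk) cluster */
        CLST len;              /* extent length in clusters */
};

/*
 * Binary search over runs sorted by vcn. Returns true and sets *index to
 * the run containing vcn, or returns false and sets *index to the
 * insertion point for a new run.
 */
static bool run_lookup_sketch(const struct run_sketch *runs, size_t count,
                              CLST vcn, size_t *index)
{
        size_t lo = 0, hi = count;

        while (lo < hi) {
                size_t mid = lo + (hi - lo) / 2;
                const struct run_sketch *r = &runs[mid];

                if (vcn < r->vcn) {
                        hi = mid;              /* target lies before this run */
                } else if (vcn >= r->vcn + r->len) {
                        lo = mid + 1;          /* target lies after this run */
                } else {
                        *index = mid;          /* vcn falls inside this run */
                        return true;
                }
        }
        *index = lo;
        return false;
}

A real run list additionally has to cope with holes and with merging or splitting extents, which is roughly what the run_add_entry(), run_collapse_range() and run_insert_range() hits above appear to deal with.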
|
attrib.c
    72  if (vcn && (evcn < *vcn || *vcn < svcn))    in attr_load_runs()
    82  vcn ? *vcn : svcn, Add2Ptr(attr, run_off),    in attr_load_runs()
   475  if (svcn <= vcn && vcn <= evcn) {    in attr_set_size()
  1031  if (vcn < svcn || evcn1 <= vcn) {    in attr_data_get_block()
  1327  if (evcn < vcn || vcn < svcn) {    in attr_load_runs_vcn()
  1358  for (vcn = from >> cluster_bits; vcn <= vcn_last; vcn += clen) {    in attr_load_runs_range()
  1658  if (svcn <= vcn && vcn < evcn1) {    in attr_allocate_frame()
  1932  if (svcn <= vcn && vcn < evcn1) {    in attr_collapse_range()
  2195  if (svcn <= vcn && vcn < evcn1) {    in attr_punch_hole()
  2433  if (svcn <= vcn && vcn < evcn1) {    in attr_insert_range()
  [all …]
|
file.c
   197  CLST vcn, lcn, clen;    in ntfs_extend_initialized_size() local
   201  vcn = pos >> bits;    in ntfs_extend_initialized_size()
   368  for (; vcn < end; vcn += len) {    in ntfs_file_mmap()
   439  for (; vcn < cend_v; vcn += clen) {    in ntfs_extend()
   440  err = attr_data_get_block(ni, vcn, cend_v - vcn, &lcn,    in ntfs_extend()
   448  for (; vcn < cend; vcn += clen) {    in ntfs_extend()
   449  err = attr_data_get_block(ni, vcn, cend - vcn, &lcn,    in ntfs_extend()
   739  for (; vcn < cend_v; vcn += clen) {    in ntfs_fallocate()
   740  err = attr_data_get_block(ni, vcn, cend_v - vcn,    in ntfs_fallocate()
   749  for (; vcn < cend; vcn += clen) {    in ntfs_fallocate()
  [all …]
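
The ntfs_extend() and ntfs_fallocate() hits above share one loop shape: walk a range of virtual clusters and ask for the mapped (or newly allocated) extent covering the current position, advancing by however many clusters that extent supplies. The sketch below shows only that control flow; get_block_sketch() is a hypothetical stand-in for attr_data_get_block(), with an invented signature, so this is an illustration and not the kernel code.

#include <stdint.h>

typedef uint64_t CLST;

/*
 * Hypothetical stand-in for attr_data_get_block(): report one extent that
 * covers the whole remaining request (identity-mapped), so the caller's
 * loop below terminates after a single iteration.
 */
static int get_block_sketch(CLST vcn, CLST want, CLST *lcn, CLST *clen)
{
        *lcn = vcn;     /* pretend vcn maps 1:1 onto disk clusters */
        *clen = want;   /* pretend the extent covers the full request */
        return 0;
}

/* Walk [vcn, cend) one extent at a time, as the loops above do. */
static int extend_range_sketch(CLST vcn, CLST cend)
{
        CLST lcn, clen;
        int err;

        for (; vcn < cend; vcn += clen) {
                err = get_block_sketch(vcn, cend - vcn, &lcn, &clen);
                if (err)
                        return err;
        }
        return 0;
}

In the real code the helper can return a shorter extent on each call, which is why the loop advances by clen rather than by a fixed step.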
|
attrlist.c
   188  u8 name_len, const CLST *vcn)    in al_find_ex() argument
   207  le_vcn = le64_to_cpu(le->vcn);    in al_find_ex()
   222  if (!vcn)    in al_find_ex()
   225  if (*vcn == le_vcn)    in al_find_ex()
   228  if (*vcn < le_vcn)    in al_find_ex()
   245  u8 name_len, CLST vcn)    in al_find_le_to_insert() argument
   260  if (!le->vcn) {    in al_find_le_to_insert()
   274  if (le64_to_cpu(le->vcn) >= vcn)    in al_find_le_to_insert()
   334  le->vcn = cpu_to_le64(svcn);    in al_add_le()
|
ntfs_fs.h
   424  CLST vcn, CLST lcn, CLST len, CLST *pre_alloc,
   441  CLST vcn);
   470  u8 name_len, const CLST *vcn);
   527  u8 name_len, const CLST *vcn,
   533  const __le16 *name, u8 name_len, CLST vcn,
   799  void run_truncate(struct runs_tree *run, CLST vcn);
   800  void run_truncate_head(struct runs_tree *run, CLST vcn);
   801  void run_truncate_around(struct runs_tree *run, CLST vcn);
   818  CLST svcn, CLST evcn, CLST vcn, const u8 *run_buf,
  1134  struct runs_tree *run, CLST vcn)    in attr_load_runs_attr() argument
  [all …]
|
frecord.c
   226  if (vcn && *vcn)    in ni_find_attr()
   228  } else if (!vcn) {    in ni_find_attr()
   308  if (vcn) {    in ni_load_attr()
   311  if (!next || le64_to_cpu(next->vcn) > vcn)    in ni_load_attr()
   691  if (le->vcn)    in ni_try_remove_attr_list()
   840  le->vcn = 0;    in ni_create_attr_list()
  2028  vcn = vcn_next;    in ni_fiemap()
  2059  vcn += clen;    in ni_fiemap()
  2084  vcn += clen;    in ni_fiemap()
  2295  for (vcn = vbo >> sbi->cluster_bits; vcn < end; vcn += clen) {    in ni_decompress_file()
  [all …]
|
fsntfs.c
   815  CLST lcn, vcn, len;    in ntfs_refresh_zone() local
   824  vcn = bytes_to_cluster(sbi,    in ntfs_refresh_zone()
  1157  vcn_next = vcn + clen;    in ntfs_sb_write_run()
  1159  vcn != vcn_next)    in ntfs_sb_write_run()
  1275  vcn_next = vcn + clen;    in ntfs_read_run_nb()
  1277  vcn != vcn_next) {    in ntfs_read_run_nb()
  1393  vcn_next = vcn + clen;    in ntfs_get_bh()
  1395  vcn != vcn_next) {    in ntfs_get_bh()
  1523  vcn = vbo >> cluster_bits;    in ntfs_bio_pages()
  1571  vcn_next = vcn + clen;    in ntfs_bio_pages()
  [all …]
|
/linux/drivers/gpu/drm/amd/amdgpu/
amdgpu_vcn.c
   130  adev->vcn.indirect_sram = true;    in amdgpu_vcn_sw_init()
   151  adev->vcn.using_unified_queue =    in amdgpu_vcn_sw_init()
   219  adev->vcn.inst[i].fw_shared.cpu_addr = adev->vcn.inst[i].cpu_addr +    in amdgpu_vcn_sw_init()
   221  adev->vcn.inst[i].fw_shared.gpu_addr = adev->vcn.inst[i].gpu_addr +    in amdgpu_vcn_sw_init()
  1086  struct amdgpu_vcn_inst *vcn;    in amdgpu_debugfs_vcn_fwlog_read() local
  1093  if (!vcn)    in amdgpu_debugfs_vcn_fwlog_read()
  1099  log_buf = vcn->fw_shared.cpu_addr + vcn->fw_shared.mem_size;    in amdgpu_debugfs_vcn_fwlog_read()
  1169  void *fw_log_cpu_addr = vcn->fw_shared.cpu_addr + vcn->fw_shared.mem_size;    in amdgpu_vcn_fwlog_init()
  1170  uint64_t fw_log_gpu_addr = vcn->fw_shared.gpu_addr + vcn->fw_shared.mem_size;    in amdgpu_vcn_fwlog_init()
  1245  if (!adev->vcn.ras)    in amdgpu_vcn_ras_sw_init()
  [all …]
|
vcn_v2_5.c
   123  adev->vcn.num_vcn_inst = 2;    in vcn_v2_5_early_init()
   124  adev->vcn.harvest_config = 0;    in vcn_v2_5_early_init()
   125  adev->vcn.num_enc_rings = 1;    in vcn_v2_5_early_init()
   140  adev->vcn.num_enc_rings = 2;    in vcn_v2_5_early_init()
   287  adev->vcn.ip_dump = NULL;    in vcn_v2_5_sw_init()
   289  adev->vcn.ip_dump = ptr;    in vcn_v2_5_sw_init()
   328  kfree(adev->vcn.ip_dump);    in vcn_v2_5_sw_fini()
  1921  adev->vcn.inst[i].irq.num_types = adev->vcn.num_enc_rings + 1;    in vcn_v2_5_set_irq_funcs()
  1924  adev->vcn.inst[i].ras_poison_irq.num_types = adev->vcn.num_enc_rings + 1;    in vcn_v2_5_set_irq_funcs()
  1936  if (!adev->vcn.ip_dump)    in vcn_v2_5_print_ip_state()
  [all …]
|
vcn_v2_0.c
   113  adev->vcn.num_enc_rings = 1;    in vcn_v2_0_early_init()
   115  adev->vcn.num_enc_rings = 2;    in vcn_v2_0_early_init()
   143  &adev->vcn.inst->irq);    in vcn_v2_0_sw_init()
   151  &adev->vcn.inst->irq);    in vcn_v2_0_sw_init()
   229  adev->vcn.ip_dump = NULL;    in vcn_v2_0_sw_init()
   231  adev->vcn.ip_dump = ptr;    in vcn_v2_0_sw_init()
   263  kfree(adev->vcn.ip_dump);    in vcn_v2_0_sw_fini()
   854  adev->vcn.inst->dpg_sram_curr_addr = (uint32_t *)adev->vcn.inst->dpg_sram_cpu_addr;    in vcn_v2_0_start_dpg_mode()
  2044  if (!adev->vcn.ip_dump)    in vcn_v2_0_print_ip_state()
  2077  if (!adev->vcn.ip_dump)    in vcn_v2_0_dump_ip_state()
  [all …]
|
vcn_v5_0_0.c
   100  adev->vcn.num_enc_rings = 1;    in vcn_v5_0_0_early_init()
   153  ring = &adev->vcn.inst[i].ring_enc[0];    in vcn_v5_0_0_sw_init()
   180  adev->vcn.ip_dump = NULL;    in vcn_v5_0_0_sw_init()
   182  adev->vcn.ip_dump = ptr;    in vcn_v5_0_0_sw_init()
   220  kfree(adev->vcn.ip_dump);    in vcn_v5_0_0_sw_fini()
   679  …adev->vcn.inst[inst_idx].dpg_sram_curr_addr = (uint32_t *)adev->vcn.inst[inst_idx].dpg_sram_cpu_ad…    in vcn_v5_0_0_start_dpg_mode()
  1264  if (state == adev->vcn.cur_state)    in vcn_v5_0_0_set_powergating_state()
  1273  adev->vcn.cur_state = state;    in vcn_v5_0_0_set_powergating_state()
  1341  adev->vcn.inst[i].irq.num_types = adev->vcn.num_enc_rings + 1;    in vcn_v5_0_0_set_irq_funcs()
  1353  if (!adev->vcn.ip_dump)    in vcn_v5_0_print_ip_state()
  [all …]
|
vcn_v3_0.c
   130  adev->vcn.harvest_config = 0;    in vcn_v3_0_early_init()
   131  adev->vcn.num_enc_rings = 1;    in vcn_v3_0_early_init()
   141  adev->vcn.num_enc_rings = 0;    in vcn_v3_0_early_init()
   143  adev->vcn.num_enc_rings = 2;    in vcn_v3_0_early_init()
   291  adev->vcn.ip_dump = NULL;    in vcn_v3_0_sw_init()
   293  adev->vcn.ip_dump = ptr;    in vcn_v3_0_sw_init()
   334  kfree(adev->vcn.ip_dump);    in vcn_v3_0_sw_fini()
  2184  adev->vcn.cur_state = state;    in vcn_v3_0_set_powergating_state()
  2249  adev->vcn.inst[i].irq.num_types = adev->vcn.num_enc_rings + 1;    in vcn_v3_0_set_irq_funcs()
  2262  if (!adev->vcn.ip_dump)    in vcn_v3_0_print_ip_state()
  [all …]
|
vcn_v4_0_5.c
   117  adev->vcn.num_enc_rings = 1;    in vcn_v4_0_5_early_init()
   169  ring = &adev->vcn.inst[i].ring_enc[0];    in vcn_v4_0_5_sw_init()
   213  adev->vcn.ip_dump = NULL;    in vcn_v4_0_5_sw_init()
   215  adev->vcn.ip_dump = ptr;    in vcn_v4_0_5_sw_init()
   256  kfree(adev->vcn.ip_dump);    in vcn_v4_0_5_sw_fini()
  1537  if (state == adev->vcn.cur_state)    in vcn_v4_0_5_set_powergating_state()
  1546  adev->vcn.cur_state = state;    in vcn_v4_0_5_set_powergating_state()
  1614  adev->vcn.inst[i].irq.num_types = adev->vcn.num_enc_rings + 1;    in vcn_v4_0_5_set_irq_funcs()
  1626  if (!adev->vcn.ip_dump)    in vcn_v4_0_5_print_ip_state()
  1659  if (!adev->vcn.ip_dump)    in vcn_v4_0_5_dump_ip_state()
  [all …]
|
vcn_v4_0.c
   130  adev->vcn.num_enc_rings = 1;    in vcn_v4_0_early_init()
   245  adev->vcn.ip_dump = NULL;    in vcn_v4_0_sw_init()
   247  adev->vcn.ip_dump = ptr;    in vcn_v4_0_sw_init()
   289  kfree(adev->vcn.ip_dump);    in vcn_v4_0_sw_fini()
  2052  if (state == adev->vcn.cur_state)    in vcn_v4_0_set_powergating_state()
  2061  adev->vcn.cur_state = state;    in vcn_v4_0_set_powergating_state()
  2153  adev->vcn.inst[i].irq.num_types = adev->vcn.num_enc_rings + 1;    in vcn_v4_0_set_irq_funcs()
  2156  adev->vcn.inst[i].ras_poison_irq.num_types = adev->vcn.num_enc_rings + 1;    in vcn_v4_0_set_irq_funcs()
  2168  if (!adev->vcn.ip_dump)    in vcn_v4_0_print_ip_state()
  2201  if (!adev->vcn.ip_dump)    in vcn_v4_0_dump_ip_state()
  [all …]
|
vcn_v4_0_3.c
   110  adev->vcn.num_enc_rings = 1;    in vcn_v4_0_3_early_init()
   204  adev->vcn.ip_dump = NULL;    in vcn_v4_0_3_sw_init()
   206  adev->vcn.ip_dump = ptr;    in vcn_v4_0_3_sw_init()
   244  kfree(adev->vcn.ip_dump);    in vcn_v4_0_3_sw_fini()
   284  adev->vcn.inst[i].aid_id);    in vcn_v4_0_3_hw_init()
  1542  adev->vcn.inst[i].aid_id =    in vcn_v4_0_3_set_unified_ring_funcs()
  1639  if (state == adev->vcn.cur_state)    in vcn_v4_0_3_set_powergating_state()
  1648  adev->vcn.cur_state = state;    in vcn_v4_0_3_set_powergating_state()
  1743  if (!adev->vcn.ip_dump)    in vcn_v4_0_3_print_ip_state()
  1776  if (!adev->vcn.ip_dump)    in vcn_v4_0_3_dump_ip_state()
  [all …]
|
vcn_v1_0.c
   142  &adev->vcn.inst->irq);    in vcn_v1_0_sw_init()
   168  adev->vcn.internal.scratch9 = adev->vcn.inst->external.scratch9 =    in vcn_v1_0_sw_init()
   170  adev->vcn.internal.data0 = adev->vcn.inst->external.data0 =    in vcn_v1_0_sw_init()
   172  adev->vcn.internal.data1 = adev->vcn.inst->external.data1 =    in vcn_v1_0_sw_init()
   174  adev->vcn.internal.cmd = adev->vcn.inst->external.cmd =    in vcn_v1_0_sw_init()
   176  adev->vcn.internal.nop = adev->vcn.inst->external.nop =    in vcn_v1_0_sw_init()
   206  adev->vcn.ip_dump = NULL;    in vcn_v1_0_sw_init()
   208  adev->vcn.ip_dump = ptr;    in vcn_v1_0_sw_init()
   233  kfree(adev->vcn.ip_dump);    in vcn_v1_0_sw_fini()
  1935  if (!adev->vcn.ip_dump)    in vcn_v1_0_print_ip_state()
  [all …]
|
vega10_reg_init.c
    83  adev->doorbell_index.vcn.vcn_ring0_1 = AMDGPU_DOORBELL64_VCN0_1;    in vega10_doorbell_index_init()
    84  adev->doorbell_index.vcn.vcn_ring2_3 = AMDGPU_DOORBELL64_VCN2_3;    in vega10_doorbell_index_init()
    85  adev->doorbell_index.vcn.vcn_ring4_5 = AMDGPU_DOORBELL64_VCN4_5;    in vega10_doorbell_index_init()
    86  adev->doorbell_index.vcn.vcn_ring6_7 = AMDGPU_DOORBELL64_VCN6_7;    in vega10_doorbell_index_init()
|
vega20_reg_init.c
    89  adev->doorbell_index.vcn.vcn_ring0_1 = AMDGPU_VEGA20_DOORBELL64_VCN0_1;    in vega20_doorbell_index_init()
    90  adev->doorbell_index.vcn.vcn_ring2_3 = AMDGPU_VEGA20_DOORBELL64_VCN2_3;    in vega20_doorbell_index_init()
    91  adev->doorbell_index.vcn.vcn_ring4_5 = AMDGPU_VEGA20_DOORBELL64_VCN4_5;    in vega20_doorbell_index_init()
    92  adev->doorbell_index.vcn.vcn_ring6_7 = AMDGPU_VEGA20_DOORBELL64_VCN6_7;    in vega20_doorbell_index_init()
|
amdgpu_vcn.h
   156  *adev->vcn.inst[inst_idx].dpg_sram_curr_addr++ = \
   158  *adev->vcn.inst[inst_idx].dpg_sram_curr_addr++ = \
   198  *adev->vcn.inst[inst_idx].dpg_sram_curr_addr++ = \
   200  *adev->vcn.inst[inst_idx].dpg_sram_curr_addr++ = \
   508  void amdgpu_vcn_fwlog_init(struct amdgpu_vcn_inst *vcn);
   510  uint8_t i, struct amdgpu_vcn_inst *vcn);
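
In the amdgpu entries, vcn refers to AMD's Video Core Next encode/decode block rather than NTFS cluster numbers. The paired dpg_sram_curr_addr++ stores at lines 156/158 and 198/200 above come from macros that appear to stage register writes into a CPU-side SRAM image (when indirect programming is in use, as the indirect_sram and dpg_sram_cpu_addr hits elsewhere suggest) instead of touching the registers directly. The sketch below shows only that staging pattern; every name in it is hypothetical and it is an illustration of the idea, not the kernel macro.

#include <stdint.h>

/* Hypothetical staging buffer for indirect register programming. */
struct dpg_sram_sketch {
        uint32_t *curr;         /* next free slot in the CPU-side image */
};

/*
 * Queue one (register offset, value) pair into the staging image; a later
 * step would hand the whole image to firmware to apply. This mirrors the
 * shape of the consecutive "*...dpg_sram_curr_addr++ =" stores listed above.
 */
static inline void dpg_sram_queue_write(struct dpg_sram_sketch *img,
                                        uint32_t reg_offset, uint32_t value)
{
        *img->curr++ = reg_offset;      /* first word: which register */
        *img->curr++ = value;           /* second word: what to write */
}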
|
soc24.c
    77  if (adev->vcn.num_vcn_inst == hweight8(adev->vcn.harvest_config))    in soc24_query_video_codecs()
   320  adev->doorbell_index.vcn.vcn_ring0_1 = AMDGPU_NAVI10_DOORBELL64_VCN0_1;    in soc24_init_doorbell_index()
   321  adev->doorbell_index.vcn.vcn_ring2_3 = AMDGPU_NAVI10_DOORBELL64_VCN2_3;    in soc24_init_doorbell_index()
   322  adev->doorbell_index.vcn.vcn_ring4_5 = AMDGPU_NAVI10_DOORBELL64_VCN4_5;    in soc24_init_doorbell_index()
   323  adev->doorbell_index.vcn.vcn_ring6_7 = AMDGPU_NAVI10_DOORBELL64_VCN6_7;    in soc24_init_doorbell_index()
|
soc21.c
   153  if (adev->vcn.num_vcn_inst == hweight8(adev->vcn.harvest_config))    in soc21_query_video_codecs()
   162  if ((adev->vcn.harvest_config & AMDGPU_VCN_HARVEST_VCN0) ||    in soc21_query_video_codecs()
   175  if ((adev->vcn.harvest_config & AMDGPU_VCN_HARVEST_VCN0)) {    in soc21_query_video_codecs()
   509  adev->doorbell_index.vcn.vcn_ring0_1 = AMDGPU_NAVI10_DOORBELL64_VCN0_1;    in soc21_init_doorbell_index()
   510  adev->doorbell_index.vcn.vcn_ring2_3 = AMDGPU_NAVI10_DOORBELL64_VCN2_3;    in soc21_init_doorbell_index()
   511  adev->doorbell_index.vcn.vcn_ring4_5 = AMDGPU_NAVI10_DOORBELL64_VCN4_5;    in soc21_init_doorbell_index()
   512  adev->doorbell_index.vcn.vcn_ring6_7 = AMDGPU_NAVI10_DOORBELL64_VCN6_7;    in soc21_init_doorbell_index()
   803  if ((adev->vcn.harvest_config & AMDGPU_VCN_HARVEST_VCN0) ||    in soc21_common_late_init()
|
nv.c
   213  if (adev->vcn.num_vcn_inst == hweight8(adev->vcn.harvest_config))    in nv_query_video_codecs()
   221  if (adev->vcn.harvest_config & AMDGPU_VCN_HARVEST_VCN0) {    in nv_query_video_codecs()
   233  if (adev->vcn.harvest_config & AMDGPU_VCN_HARVEST_VCN0) {    in nv_query_video_codecs()
   584  adev->doorbell_index.vcn.vcn_ring0_1 = AMDGPU_NAVI10_DOORBELL64_VCN0_1;    in nv_init_doorbell_index()
   585  adev->doorbell_index.vcn.vcn_ring2_3 = AMDGPU_NAVI10_DOORBELL64_VCN2_3;    in nv_init_doorbell_index()
   586  adev->doorbell_index.vcn.vcn_ring4_5 = AMDGPU_NAVI10_DOORBELL64_VCN4_5;    in nv_init_doorbell_index()
   587  adev->doorbell_index.vcn.vcn_ring6_7 = AMDGPU_NAVI10_DOORBELL64_VCN6_7;    in nv_init_doorbell_index()
   953  if (adev->vcn.harvest_config & AMDGPU_VCN_HARVEST_VCN0) {    in nv_common_late_init()
|
amdgpu_discovery.c
   638  adev->vcn.inst_mask &=    in amdgpu_discovery_read_harvest_bit_per_ip()
   644  adev->vcn.inst_mask &=    in amdgpu_discovery_read_harvest_bit_per_ip()
   693  adev->vcn.harvest_config |=    in amdgpu_discovery_read_from_harvest_table()
   698  adev->vcn.inst_mask &=    in amdgpu_discovery_read_from_harvest_table()
  1295  adev->vcn.inst_mask = 0;    in amdgpu_discovery_reg_base_init()
  1343  adev->vcn.vcn_config[adev->vcn.num_vcn_inst] =    in amdgpu_discovery_reg_base_init()
  1345  adev->vcn.num_vcn_inst++;    in amdgpu_discovery_reg_base_init()
  1346  adev->vcn.inst_mask |=    in amdgpu_discovery_reg_base_init()
  2477  adev->vcn.num_vcn_inst = 1;    in amdgpu_discovery_set_ip_blocks()
  2539  adev->vcn.num_vcn_inst = 2;    in amdgpu_discovery_set_ip_blocks()
  [all …]
|
jpeg_v1_0.c
   607  bool set_clocks = !cancel_delayed_work_sync(&adev->vcn.idle_work);    in jpeg_v1_0_ring_begin_use()
   610  mutex_lock(&adev->vcn.vcn1_jpeg1_workaround);    in jpeg_v1_0_ring_begin_use()
   612  if (amdgpu_fence_wait_empty(&adev->vcn.inst->ring_dec))    in jpeg_v1_0_ring_begin_use()
   615  for (cnt = 0; cnt < adev->vcn.num_enc_rings; cnt++) {    in jpeg_v1_0_ring_begin_use()
   616  if (amdgpu_fence_wait_empty(&adev->vcn.inst->ring_enc[cnt]))    in jpeg_v1_0_ring_begin_use()
|
aqua_vanjaram.c
    57  adev->doorbell_index.vcn.vcn_ring0_1 = AMDGPU_DOORBELL_LAYOUT1_VCN_START;    in aqua_vanjaram_doorbell_index_init()
    67  return (adev->xcp_mgr->num_xcps > adev->vcn.num_vcn_inst);    in aqua_vanjaram_xcp_vcn_shared()
   281  { VCN_HWIP, adev->vcn.inst_mask },    in aqua_vanjaram_ip_map_init()
   401  num_vcn = adev->vcn.num_vcn_inst;    in __aqua_vanjaram_get_xcp_ip_info()
   704  adev->vcn.harvest_config = 0;    in aqua_vanjaram_init_soc_config()
   705  adev->vcn.num_inst_per_aid = 1;    in aqua_vanjaram_init_soc_config()
   706  adev->vcn.num_vcn_inst = hweight32(adev->vcn.inst_mask);    in aqua_vanjaram_init_soc_config()
|