/linux/drivers/gpu/drm/amd/amdgpu/
vcn_v2_5.c
   803  VCN, 0, mmUVD_MPC_CNTL),  in vcn_v2_5_start_dpg_mode()
   848  VCN, 0, mmUVD_MASTINT_EN),  in vcn_v2_5_start_dpg_mode()
  1185  SOC15_REG_OFFSET(VCN, i,  in vcn_v2_5_sriov_start()
  1189  SOC15_REG_OFFSET(VCN, i,  in vcn_v2_5_sriov_start()
  1197  SOC15_REG_OFFSET(VCN, i,  in vcn_v2_5_sriov_start()
  1214  SOC15_REG_OFFSET(VCN, i,  in vcn_v2_5_sriov_start()
  1218  SOC15_REG_OFFSET(VCN, i,  in vcn_v2_5_sriov_start()
  1228  SOC15_REG_OFFSET(VCN, i,  in vcn_v2_5_sriov_start()
  1233  SOC15_REG_OFFSET(VCN, i,  in vcn_v2_5_sriov_start()
  1260  SOC15_REG_OFFSET(VCN, i,  in vcn_v2_5_sriov_start()
  [all …]
|
vcn_v3_0.c
   371  RREG32_SOC15(VCN, i, mmUVD_STATUS))) {  in vcn_v3_0_hw_fini()
   969  VCN, inst_idx, mmUVD_MPC_CNTL),  in vcn_v3_0_start_dpg_mode()
   973  VCN, inst_idx, mmUVD_MPC_SET_MUXA0),  in vcn_v3_0_start_dpg_mode()
   980  VCN, inst_idx, mmUVD_MPC_SET_MUXB0),  in vcn_v3_0_start_dpg_mode()
   987  VCN, inst_idx, mmUVD_MPC_SET_MUX),  in vcn_v3_0_start_dpg_mode()
  1014  VCN, inst_idx, mmUVD_MASTINT_EN),  in vcn_v3_0_start_dpg_mode()
  1102  WREG32_SOC15(VCN, i, mmUVD_STATUS, tmp);  in vcn_v3_0_start()
  1153  WREG32_SOC15(VCN, i, mmUVD_MPC_SET_MUX,  in vcn_v3_0_start()
  1235  WREG32_SOC15(VCN, i, mmUVD_SCRATCH2, 0);  in vcn_v3_0_start()
  1237  WREG32_SOC15(VCN, i, mmUVD_RBC_RB_WPTR,  in vcn_v3_0_start()
  [all …]
|
vcn_v1_0.c
   231  RREG32_SOC15(VCN, 0, mmUVD_STATUS))) {  in vcn_v1_0_hw_fini()
   454  WREG32_SOC15(VCN, 0, mmJPEG_CGC_CTRL, data);  in vcn_v1_0_disable_clock_gating()
   458  WREG32_SOC15(VCN, 0, mmJPEG_CGC_GATE, data);  in vcn_v1_0_disable_clock_gating()
   461  data = RREG32_SOC15(VCN, 0, mmUVD_CGC_CTRL);  in vcn_v1_0_disable_clock_gating()
   469  WREG32_SOC15(VCN, 0, mmUVD_CGC_CTRL, data);  in vcn_v1_0_disable_clock_gating()
   471  data = RREG32_SOC15(VCN, 0, mmUVD_CGC_GATE);  in vcn_v1_0_disable_clock_gating()
   492  WREG32_SOC15(VCN, 0, mmUVD_CGC_GATE, data);  in vcn_v1_0_disable_clock_gating()
   494  data = RREG32_SOC15(VCN, 0, mmUVD_CGC_CTRL);  in vcn_v1_0_disable_clock_gating()
   515  WREG32_SOC15(VCN, 0, mmUVD_CGC_CTRL, data);  in vcn_v1_0_disable_clock_gating()
   592  WREG32_SOC15(VCN, 0, mmUVD_CGC_CTRL, data);  in vcn_v1_0_enable_clock_gating()
  [all …]
|
vcn_v2_0.c
   266  RREG32_SOC15(VCN, 0, mmUVD_STATUS)))  in vcn_v2_0_hw_fini()
   488  data = RREG32_SOC15(VCN, 0, mmUVD_CGC_CTRL);  in vcn_v2_0_disable_clock_gating()
   495  WREG32_SOC15(VCN, 0, mmUVD_CGC_CTRL, data);  in vcn_v2_0_disable_clock_gating()
   497  data = RREG32_SOC15(VCN, 0, mmUVD_CGC_GATE);  in vcn_v2_0_disable_clock_gating()
   518  WREG32_SOC15(VCN, 0, mmUVD_CGC_GATE, data);  in vcn_v2_0_disable_clock_gating()
   520  data = RREG32_SOC15(VCN, 0, mmUVD_CGC_CTRL);  in vcn_v2_0_disable_clock_gating()
   541  WREG32_SOC15(VCN, 0, mmUVD_CGC_CTRL, data);  in vcn_v2_0_disable_clock_gating()
   655  WREG32_SOC15(VCN, 0, mmUVD_CGC_CTRL, data);  in vcn_v2_0_enable_clock_gating()
   678  WREG32_SOC15(VCN, 0, mmUVD_CGC_CTRL, data);  in vcn_v2_0_enable_clock_gating()
   965  WREG32_SOC15(VCN, 0, mmUVD_MPC_CNTL, tmp);  in vcn_v2_0_start()
  [all …]
|
amdgpu_vcn.h
   137  WREG32_SOC15(VCN, inst_idx, mmUVD_DPG_LMA_CTL, \
   141  RREG32_SOC15(VCN, inst_idx, mmUVD_DPG_LMA_DATA); \
   147  WREG32_SOC15(VCN, inst_idx, mmUVD_DPG_LMA_DATA, value); \
   148  WREG32_SOC15(VCN, inst_idx, mmUVD_DPG_LMA_CTL, \
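Every amdgpu reference above goes through the SOC15 register helpers: RREG32_SOC15()/WREG32_SOC15() take the IP block (VCN), an instance index and a register name, and the amdgpu_vcn.h macros at lines 137-148 reuse the same helpers to reach VCN registers indirectly through the mmUVD_DPG_LMA_CTL/mmUVD_DPG_LMA_DATA pair while the block runs in DPG mode. Below is a minimal sketch of the direct read-modify-write pattern behind the clock-gating entries; it assumes the amdgpu driver build context, the function name is hypothetical, and the clock-gating field name is quoted from memory of the VCN register headers rather than from this listing.

```c
#include "amdgpu.h"
#include "soc15_common.h"
#include "vcn/vcn_1_0_offset.h"
#include "vcn/vcn_1_0_sh_mask.h"

/*
 * Sketch only, not driver code: read a per-instance VCN register, flip one
 * field and write it back, as the vcn_v*_0.c clock-gating helpers listed
 * above do.  The helper name is hypothetical; the register and field names
 * are assumed to come from the included VCN register headers.
 */
static void vcn_sketch_select_dynamic_cg(struct amdgpu_device *adev, int inst)
{
	u32 data;

	/* Read the clock-gating control register of VCN instance `inst`. */
	data = RREG32_SOC15(VCN, inst, mmUVD_CGC_CTRL);

	/* Request dynamic clock-gating mode (field name assumed). */
	data |= 1 << UVD_CGC_CTRL__DYN_CLOCK_MODE__SHIFT;

	/* Write the modified value back to the same instance. */
	WREG32_SOC15(VCN, inst, mmUVD_CGC_CTRL, data);
}
```

Judging by the snippets above, the DPG-mode macros in amdgpu_vcn.h wrap the same two helpers but stage the value in mmUVD_DPG_LMA_DATA and trigger the access via mmUVD_DPG_LMA_CTL instead of touching the target register directly.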
|
/linux/fs/ntfs/
runlist.h
    29  VCN vcn; /* vcn = Starting virtual cluster number. */
    65  extern LCN ntfs_rl_vcn_to_lcn(const runlist_element *rl, const VCN vcn);
    70  const VCN vcn);
    73  const runlist_element *rl, const VCN first_vcn,
    74  const VCN last_vcn);
    78  const VCN first_vcn, const VCN last_vcn, VCN *const stop_vcn);
    84  const VCN start, const s64 length);
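runlist.h describes an attribute's clusters as a runlist: an array of runlist_element entries, each mapping a contiguous run of virtual cluster numbers (VCNs, file-relative) onto logical cluster numbers (LCNs, volume-relative), with ntfs_rl_vcn_to_lcn() declared above as the per-VCN lookup. The following is a self-contained user-space re-sketch of that lookup, not the kernel function: the struct layout follows the vcn/lcn/length fields referenced in this listing, while the sentinel values and all names prefixed `sketch_`/`SK_` are stand-ins.

```c
#include <stdint.h>
#include <stdio.h>

/* Stand-ins for the s64-based VCN/LCN typedefs in fs/ntfs/types.h. */
typedef int64_t VCN;
typedef int64_t LCN;

/* Assumed element layout: a run of `length` clusters whose virtual
 * cluster `vcn` maps to logical cluster `lcn` on disk.  Negative lcn
 * values stand in for the kernel's special codes (hole, not mapped). */
typedef struct {
	VCN vcn;
	LCN lcn;
	int64_t length;
} runlist_element;

#define SK_LCN_HOLE   (-1)  /* sparse run, no clusters allocated       */
#define SK_LCN_ENOENT (-3)  /* vcn lies beyond the mapped runlist      */

/* Minimal re-sketch of ntfs_rl_vcn_to_lcn(): walk the runs until the one
 * containing `vcn` is found.  Assumes the runs cover VCN 0 upward without
 * gaps, as in the example below. */
static LCN sketch_rl_vcn_to_lcn(const runlist_element *rl, VCN vcn)
{
	for (; rl->length; rl++) {
		if (vcn < rl->vcn + rl->length) {
			if (rl->lcn >= 0)
				return rl->lcn + (vcn - rl->vcn);
			return rl->lcn;  /* hole or other special run */
		}
	}
	return SK_LCN_ENOENT;            /* ran off the mapped region */
}

int main(void)
{
	/* Two runs: VCN 0-3 at LCN 100, VCN 4-9 sparse; terminated by length 0. */
	const runlist_element rl[] = {
		{ .vcn = 0, .lcn = 100,         .length = 4 },
		{ .vcn = 4, .lcn = SK_LCN_HOLE, .length = 6 },
		{ .vcn = 0, .lcn = 0,           .length = 0 },
	};

	printf("VCN 2  -> LCN %lld\n", (long long)sketch_rl_vcn_to_lcn(rl, 2));
	printf("VCN 5  -> LCN %lld\n", (long long)sketch_rl_vcn_to_lcn(rl, 5));
	printf("VCN 42 -> LCN %lld\n", (long long)sketch_rl_vcn_to_lcn(rl, 42));
	return 0;
}
```

Runs with a negative LCN model sparse regions, which is why callers of the real lookup must check for the special return codes before treating the result as an on-disk cluster.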
|
attrib.h
    49  extern int ntfs_map_runlist_nolock(ntfs_inode *ni, VCN vcn,
    51  extern int ntfs_map_runlist(ntfs_inode *ni, VCN vcn);
    53  extern LCN ntfs_attr_vcn_to_lcn_nolock(ntfs_inode *ni, const VCN vcn,
    57  const VCN vcn, ntfs_attr_search_ctx *ctx);
    61  const VCN lowest_vcn, const u8 *val, const u32 val_len,
|
lcnalloc.h
    30  const VCN start_vcn, const s64 count, const LCN start_lcn,
    34  extern s64 __ntfs_cluster_free(ntfs_inode *ni, const VCN start_vcn,
    93  static inline s64 ntfs_cluster_free(ntfs_inode *ni, const VCN start_vcn,  in ntfs_cluster_free()
|
runlist.c
   527  VCN marker_vcn = 0;  in ntfs_runlists_merge()
   738  VCN vcn; /* Current vcn. */  in ntfs_mapping_pairs_decompress()
   752  attr->data.non_resident.lowest_vcn) < (VCN)0) {  in ntfs_mapping_pairs_decompress()
   900  VCN max_cluster;  in ntfs_mapping_pairs_decompress()
   990  LCN ntfs_rl_vcn_to_lcn(const runlist_element *rl, const VCN vcn)  in ntfs_rl_vcn_to_lcn()
  1039  runlist_element *ntfs_rl_find_vcn_nolock(runlist_element *rl, const VCN vcn)  in ntfs_rl_find_vcn_nolock()
  1118  const runlist_element *rl, const VCN first_vcn,  in ntfs_get_size_for_mapping_pairs()
  1119  const VCN last_vcn)  in ntfs_get_size_for_mapping_pairs()
  1311  const VCN first_vcn, const VCN last_vcn, VCN *const stop_vcn)  in ntfs_mapping_pairs_build()
  1631  const VCN start, const s64 length)  in ntfs_rl_punch_nolock()
  [all …]
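runlist.c also holds the translation between the in-memory runlist and the on-disk "mapping pairs" encoding (ntfs_mapping_pairs_decompress()/ntfs_mapping_pairs_build() above). As a reading aid, here is a stand-alone sketch of the decode direction based on the commonly documented format, where each pair begins with a header byte whose low nibble gives the byte count of the run length and whose high nibble gives the byte count of a sign-extended, delta-coded LCN (zero meaning a sparse run). It is not the kernel routine, performs none of its validation, and the example bytes are illustrative.

```c
#include <stdint.h>
#include <stdio.h>

typedef int64_t VCN;
typedef int64_t LCN;

/* Read `n` little-endian bytes and sign-extend the result. */
static int64_t read_le_signed(const uint8_t *p, unsigned n)
{
	uint64_t v = 0;
	unsigned i;

	for (i = 0; i < n; i++)
		v |= (uint64_t)p[i] << (8 * i);
	if (n && n < 8 && (p[n - 1] & 0x80))
		v |= ~(uint64_t)0 << (8 * n);   /* sign-extend top byte */
	return (int64_t)v;
}

/* Walk a mapping pairs array and print the runs it encodes. */
static void decode_mapping_pairs(const uint8_t *mp)
{
	VCN vcn = 0;
	LCN lcn = 0;

	while (*mp) {
		unsigned len_size = *mp & 0x0f;  /* bytes of run length */
		unsigned lcn_size = *mp >> 4;    /* bytes of LCN delta  */
		const uint8_t *p = mp + 1;
		int64_t length;

		length = read_le_signed(p, len_size);  /* run length in clusters */
		p += len_size;

		if (lcn_size) {
			lcn += read_le_signed(p, lcn_size);  /* delta-coded LCN */
			p += lcn_size;
			printf("VCN %lld..%lld -> LCN %lld\n",
			       (long long)vcn, (long long)(vcn + length - 1),
			       (long long)lcn);
		} else {
			printf("VCN %lld..%lld -> sparse hole\n",
			       (long long)vcn, (long long)(vcn + length - 1));
		}
		vcn += length;
		mp = p;
	}
}

int main(void)
{
	/* Illustrative pair: 4 clusters starting at LCN 0x5634, then the
	 * 0x00 terminator. */
	static const uint8_t mp[] = { 0x21, 0x04, 0x34, 0x56, 0x00 };

	decode_mapping_pairs(mp);
	return 0;
}
```

Running the sketch prints `VCN 0..3 -> LCN 22068`, i.e. a single run of four clusters starting at LCN 0x5634.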
|
types.h
    29  typedef s64 VCN;  (typedef)
|
aops.c
   167  VCN vcn;  in ntfs_read_block()
   242  vcn = (VCN)iblock << blocksize_bits >>  in ntfs_read_block()
   244  vcn_ofs = ((VCN)iblock << blocksize_bits) &  in ntfs_read_block()
   532  VCN vcn;  in ntfs_write_block()
   699  vcn = (VCN)block << blocksize_bits;  in ntfs_write_block()
  1002  VCN vcn;  in ntfs_write_mst_block()
  1008  vcn = (VCN)block << bh_size_bits;  in ntfs_write_mst_block()
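The truncated ntfs_read_block()/ntfs_write_block() snippets above all do the same arithmetic: scale a file-relative block number up to a byte offset, then split that offset into a VCN and a byte offset within the cluster. A small self-contained sketch of that split follows; the function and parameter names (block_to_vcn, blocksize_bits, cluster_size_bits) are chosen here for illustration and are not filled in from the cut-off lines.

```c
#include <stdint.h>
#include <stdio.h>

typedef int64_t VCN;  /* mirrors the s64 typedef in fs/ntfs/types.h */

/*
 * Sketch of the block-to-cluster split behind the truncated aops.c
 * snippets above.  Names are illustrative, not the driver's fields.
 */
static void block_to_vcn(int64_t iblock, unsigned blocksize_bits,
			 unsigned cluster_size_bits,
			 VCN *vcn, unsigned *vcn_ofs)
{
	int64_t byte_ofs = iblock << blocksize_bits;  /* byte offset in the file */

	*vcn = (VCN)(byte_ofs >> cluster_size_bits);  /* which virtual cluster  */
	*vcn_ofs = (unsigned)(byte_ofs & (((int64_t)1 << cluster_size_bits) - 1));
}

int main(void)
{
	VCN vcn;
	unsigned ofs;

	/* 512-byte blocks (2^9) inside 4096-byte clusters (2^12): block 10
	 * sits 1024 bytes into cluster 1. */
	block_to_vcn(10, 9, 12, &vcn, &ofs);
	printf("block 10 -> VCN %lld, offset %u\n", (long long)vcn, ofs);
	return 0;
}
```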
|
attrib.c
    70  int ntfs_map_runlist_nolock(ntfs_inode *ni, VCN vcn, ntfs_attr_search_ctx *ctx)  in ntfs_map_runlist_nolock()
    72  VCN end_vcn;  in ntfs_map_runlist_nolock()
   100  VCN allocated_size_vcn;  in ntfs_map_runlist_nolock()
   284  int ntfs_map_runlist(ntfs_inode *ni, VCN vcn)  in ntfs_map_runlist()
   327  LCN ntfs_attr_vcn_to_lcn_nolock(ntfs_inode *ni, const VCN vcn,  in ntfs_attr_vcn_to_lcn_nolock()
   450  runlist_element *ntfs_attr_find_vcn_nolock(ntfs_inode *ni, const VCN vcn,  in ntfs_attr_find_vcn_nolock()
   845  const IGNORE_CASE_BOOL ic, const VCN lowest_vcn,  in ntfs_external_attr_find()
  1175  const VCN lowest_vcn, const u8 *val, const u32 val_len,  in ntfs_attr_lookup()
  1893  VCN vcn;  in ntfs_attr_extend_allocation()
|
compress.c
   478  VCN vcn;  in ntfs_read_compressed_block()
   481  VCN start_vcn = (((s64)index << PAGE_SHIFT) & ~cb_size_mask) >>  in ntfs_read_compressed_block()
   487  VCN end_vcn = ((((s64)(index + 1UL) << PAGE_SHIFT) + cb_size - 1)  in ntfs_read_compressed_block()
|
lcnalloc.c
   132  runlist_element *ntfs_cluster_alloc(ntfs_volume *vol, const VCN start_vcn,  in ntfs_cluster_alloc()
   835  s64 __ntfs_cluster_free(ntfs_inode *ni, const VCN start_vcn, s64 count,  in __ntfs_cluster_free()
   916  VCN vcn;  in __ntfs_cluster_free()
|
file.c
   571  VCN vcn, highest_vcn = 0, cpos, cend, bh_cpos, bh_cend;  in ntfs_prepare_pages_for_non_resident_write()
   643  VCN cdelta;  in ntfs_prepare_pages_for_non_resident_write()
  1734  VCN last_vcn;  in ntfs_perform_write()
  1774  VCN vcn;  in ntfs_perform_write()
|
mft.c
   525  VCN vcn;  in ntfs_sync_mft_mirror()
   531  vcn = ((VCN)mft_no << vol->mft_record_size_bits) +  in ntfs_sync_mft_mirror()
   718  VCN vcn;  in write_mft_record_nolock()
   724  vcn = ((VCN)ni->mft_no << vol->mft_record_size_bits) +  in write_mft_record_nolock()
  1712  VCN old_last_vcn;  in ntfs_mft_data_extend_allocation_nolock()
|
index.c
   108  VCN vcn, old_vcn;  in ntfs_index_lookup()
|
dir.c
    80  VCN vcn, old_vcn;  in ntfs_lookup_inode_by_name()
   634  VCN vcn, old_vcn;
|
logfile.c
   714  VCN vcn, end_vcn;  in ntfs_empty_logfile()
|
inode.c
  1749  VCN next_vcn, last_vcn, highest_vcn;  in ntfs_read_inode_mount()
  2347  VCN highest_vcn;  in ntfs_truncate()
|