| /fs/ntfs3/ |
| attrib.c |
| 1247 | u64 vbo; | in attr_data_read_resident() (local) |
| 1261 | if (vbo > data_size) | in attr_data_read_resident() |
| 1274 | u64 vbo; | in attr_data_write_resident() (local) |
| 1445 | if (vbo[1]) { | in attr_wof_frame_info() |
| 1447 | vbo[0] = vbo[1] - bytes_per_off; | in attr_wof_frame_info() |
| 1451 | vbo[0] = 0; | in attr_wof_frame_info() |
| 1901 | if (vbo > data_size) | in attr_collapse_range() |
| 2085 | valid_size = vbo; | in attr_collapse_range() |
| 2144 | from = vbo; | in attr_punch_hole() |
| 2163 | bytes += vbo; | in attr_punch_hole() |
| [all …] |
|
| dir.c |
| 355 | const struct INDEX_HDR *hdr, u64 vbo, u64 pos, | in ntfs_read_hdr() (argument) |
| 376 | if (vbo + off < pos) | in ntfs_read_hdr() |
| 382 | ctx->pos = vbo + off; | in ntfs_read_hdr() |
| 402 | u64 vbo; | in ntfs_readdir() (local) |
| 470 | vbo = (u64)bit << index_bits; | in ntfs_readdir() |
| 471 | if (vbo >= i_size) { | in ntfs_readdir() |
| 485 | vbo = (u64)bit << index_bits; | in ntfs_readdir() |
| 486 | if (vbo >= i_size) { | in ntfs_readdir() |
| 497 | vbo + sbi->record_size, pos, name, ctx); | in ntfs_readdir() |
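
The ntfs_readdir() hits above show how a set bit in the directory's index bitmap becomes a byte offset: `vbo = (u64)bit << index_bits`, then the result is checked against i_size to stop at the end of the index allocation. Below is a minimal user-space sketch of that arithmetic, not the kernel code; it uses plain C types, and assumes index_bits is log2 of the index block size as the snippets imply.

```c
#include <stdint.h>
#include <stdbool.h>

/*
 * Sketch only: map bit "bit" of the index bitmap to the byte offset of
 * the corresponding index block, and report whether it still lies
 * inside the allocation (mirrors "vbo = (u64)bit << index_bits" and
 * "if (vbo >= i_size)" from ntfs_readdir()).
 */
static bool index_block_offset(uint64_t bit, unsigned int index_bits,
			       uint64_t i_size, uint64_t *vbo)
{
	*vbo = bit << index_bits;   /* one index block per bitmap bit */
	return *vbo < i_size;       /* false: past the end, stop readdir */
}
```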
|
| fsntfs.c |
| 774 | u64 vbo; | in ntfs_clear_mft_tail() (local) |
| 786 | vbo = (u64)from * rs; | in ntfs_clear_mft_tail() |
| 1194 | lbo = vbo + sbi->mft.lbo; | in ntfs_read_run_nb() |
| 1321 | off = vbo & sbi->cluster_mask; | in ntfs_get_bh() |
| 1500 | vbo = vbo & ~511ull; | in ntfs_bio_pages() |
| 1501 | bytes = lbo - vbo; | in ntfs_bio_pages() |
| 1503 | vcn = vbo >> cluster_bits; | in ntfs_bio_pages() |
| 1508 | off = vbo & sbi->cluster_mask; | in ntfs_bio_pages() |
| 1525 | off = vbo & (PAGE_SIZE - 1); | in ntfs_bio_pages() |
| 1534 | vbo += add; | in ntfs_bio_pages() |
| [all …] |
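
The ntfs_bio_pages() lines illustrate the recurring decomposition of a virtual byte offset: the cluster index is `vbo >> cluster_bits` and the offset inside that cluster is `vbo & cluster_mask`, while the hit at line 1194 adds the MFT's logical start to turn a vbo into an on-disk byte offset. A small sketch of the split, assuming cluster_mask == (1 << cluster_bits) - 1 as the snippets imply:

```c
#include <stdint.h>

struct vbo_split {
	uint64_t vcn;  /* virtual cluster number: vbo >> cluster_bits */
	uint32_t off;  /* byte offset within the cluster: vbo & cluster_mask */
};

/* Sketch only: decompose a virtual byte offset the way ntfs_bio_pages()
 * does before it walks the run list. */
static struct vbo_split split_vbo(uint64_t vbo, unsigned int cluster_bits)
{
	uint64_t cluster_mask = ((uint64_t)1 << cluster_bits) - 1;

	return (struct vbo_split){
		.vcn = vbo >> cluster_bits,
		.off = (uint32_t)(vbo & cluster_mask),
	};
}
```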
|
| frecord.c |
| 898 | u64 vbo; | in ni_ins_attr_ext() (local) |
| 1904 | end = vbo + len; | in ni_fiemap() |
| 1909 | while (vbo < end) { | in ni_fiemap() |
| 1971 | if (vbo + bytes >= end) | in ni_fiemap() |
| 1972 | bytes = end - vbo; | in ni_fiemap() |
| 1982 | if (vbo + dlen >= end) | in ni_fiemap() |
| 1995 | vbo = valid; | in ni_fiemap() |
| 2015 | vbo += bytes; | in ni_fiemap() |
| 2123 | u64 vbo; | in ni_decompress_file() (local) |
| 2154 | for (vbo = 0; vbo < i_size; vbo += bytes) { | in ni_decompress_file() |
| [all …] |
|
| file.c |
| 203 | pgoff_t idx = vbo >> PAGE_SHIFT; | in ntfs_zero_range() |
| 204 | u32 from = vbo & (PAGE_SIZE - 1); | in ntfs_zero_range() |
| 499 | loff_t end = vbo + len; | in ntfs_fallocate() |
| 562 | err = attr_punch_hole(ni, vbo, len, &frame_size); | in ntfs_fallocate() |
| 573 | vbo_a = (vbo + mask) & ~mask; | in ntfs_fallocate() |
| 577 | if (tmp > vbo) { | in ntfs_fallocate() |
| 578 | err = ntfs_zero_range(inode, vbo, tmp); | in ntfs_fallocate() |
| 583 | if (vbo < end_a && end_a < end) { | in ntfs_fallocate() |
| 617 | err = attr_collapse_range(ni, vbo, len); | in ntfs_fallocate() |
| 635 | err = attr_insert_range(ni, vbo, len); | in ntfs_fallocate() |
| [all …] |
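
In the ntfs_fallocate() hits, the requested range [vbo, vbo + len) is aligned to the allocation granularity before the hole is punched: `vbo_a = (vbo + mask) & ~mask` rounds the start up, and the snippets suggest the unaligned head (and tail past end_a) is zeroed with ntfs_zero_range() rather than deallocated. A sketch of the same round-up/round-down arithmetic, assuming mask is granularity - 1 for a power-of-two granularity:

```c
#include <stdint.h>

/* Sketch only: align a byte range to an allocation unit the way
 * ntfs_fallocate() prepares a punch-hole request. */
static uint64_t align_up(uint64_t x, uint64_t mask)
{
	return (x + mask) & ~mask;      /* vbo_a = (vbo + mask) & ~mask */
}

static uint64_t align_down(uint64_t x, uint64_t mask)
{
	return x & ~mask;               /* end_a: last aligned boundary */
}
```

Only the aligned middle [align_up(vbo), align_down(end)) can be handed to attr_punch_hole(); the partial head and tail stay allocated and are cleared byte-wise.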
|
| fslog.c |
| 949 | if (off == vbo) { | in alloc_rsttbl_from_idx() |
| 971 | if (off == vbo) { | in alloc_rsttbl_from_idx() |
| 1001 | u32 vbo; | (member) |
| 1083 | return vbo; | in lsn_to_vbo() |
| 1207 | u64 vbo; | in log_read_rst() (local) |
| 1212 | vbo = 0; | in log_read_rst() |
| 1215 | vbo = 512; | in log_read_rst() |
| 1220 | for (; vbo < log->l_size; vbo = 2 * vbo + skip, skip = 0) { | in log_read_rst() |
| 1255 | info->vbo = vbo; | in log_read_rst() |
| 1477 | if (end <= vbo) | in next_log_lsn() |
| [all …] |
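
The log_read_rst() hits show how the $LogFile is probed for a restart page: the scan starts at byte 0 or 512 and the loop `vbo = 2 * vbo + skip` keeps doubling the offset until it passes the log size. The sketch below only enumerates those candidate offsets; the pairing of the starting vbo with the skip value depends on context not shown in the listing, so both are left as parameters.

```c
#include <stdint.h>
#include <stdio.h>

/* Sketch only: reproduce the probe sequence of the loop
 * "for (; vbo < l_size; vbo = 2 * vbo + skip, skip = 0)". */
static void list_restart_probes(uint64_t first_vbo, uint32_t skip,
				uint64_t l_size)
{
	for (uint64_t vbo = first_vbo; vbo < l_size;
	     vbo = 2 * vbo + skip, skip = 0)
		printf("check restart page at vbo %llu\n",
		       (unsigned long long)vbo);
}

int main(void)
{
	/* e.g. a 4 MiB log probed from offset 0 with a 512-byte skip:
	 * prints 0, 512, 1024, 2048, ... */
	list_restart_probes(0, 512, (uint64_t)4 << 20);
	return 0;
}
```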
|
| index.c |
| 263 | vbo = off & ~(size_t)sbi->block_mask; | in bmp_buf_get() |
| 265 | bbuf->new_valid = vbo + blocksize; | in bmp_buf_get() |
| 271 | if (vbo >= valid_size) { | in bmp_buf_get() |
| 273 | } else if (vbo + blocksize > valid_size) { | in bmp_buf_get() |
| 375 | size_t vbo = from >> 3; | in scan_nres_bitmap() (local) |
| 390 | vcn = vbo >> sbi->cluster_bits; | in scan_nres_bitmap() |
| 423 | if (vbo >= valid_size) { | in scan_nres_bitmap() |
| 431 | if (vbo + blocksize > data_size) | in scan_nres_bitmap() |
| 432 | nbits = 8 * (data_size - vbo); | in scan_nres_bitmap() |
| 440 | *ret += 8 * vbo; | in scan_nres_bitmap() |
| [all …] |
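
scan_nres_bitmap() moves between bit and byte units: `vbo = from >> 3` is the byte that holds bit `from`, `8 * vbo` converts a byte offset back into a bit index, and when the last block is only partially covered (`vbo + blocksize > data_size`) the number of bits to scan is clamped to `8 * (data_size - vbo)`. A sketch of that clamping, assuming blocksize and data_size are byte counts:

```c
#include <stdint.h>

/* Sketch only: how many bits of one bitmap block are actually backed by
 * data, mirroring the clamp in scan_nres_bitmap(). */
static uint32_t bits_in_block(uint64_t vbo, uint32_t blocksize,
			      uint64_t data_size)
{
	if (vbo >= data_size)
		return 0;                                 /* block past EOF */
	if (vbo + blocksize > data_size)
		return (uint32_t)(8 * (data_size - vbo)); /* partial block */
	return 8 * blocksize;                             /* full block */
}
```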
|
| bitmap.c |
| 508 | size_t wpos, wbit, iw, vbo; | in wnd_rescan() (local) |
| 517 | vbo = 0; | in wnd_rescan() |
| 528 | vbo * 8 - prev_tail, | in wnd_rescan() |
| 543 | u32 off = vbo & sbi->cluster_mask; | in wnd_rescan() |
| 545 | if (!run_lookup_entry(&wnd->run, vbo >> cluster_bits, | in wnd_rescan() |
| 569 | wbit = vbo * 8; | in wnd_rescan() |
| 613 | vbo += blocksize; | in wnd_rescan() |
| 682 | size_t vbo; | in wnd_map() (local) |
| 690 | vbo = (u64)iw << sb->s_blocksize_bits; | in wnd_map() |
| 1346 | u64 vbo, lbo, bytes; | in wnd_extend() (local) |
| [all …] |
|
| record.c |
| 121 | u64 vbo = (u64)mi->rno << sbi->record_bits; | in mi_read() (local) |
| 133 | err = ntfs_read_bh(sbi, run, vbo, &rec->rhdr, bpr, &mi->nb); | in mi_read() |
| 152 | vbo >> sbi->cluster_bits); | in mi_read() |
| 162 | err = ntfs_read_bh(sbi, run, vbo, &rec->rhdr, bpr, &mi->nb); | in mi_read() |
| 420 | u64 vbo = (u64)rno << sbi->record_bits; | in mi_format_new() (local) |
| 461 | err = ntfs_get_bh(sbi, &ni->file.run, vbo, sbi->record_size, | in mi_format_new() |
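
mi_read() and mi_format_new() locate an MFT record purely by shifting: `vbo = (u64)rno << sbi->record_bits` is the byte offset of record number rno inside the $MFT data stream, which is then passed to ntfs_read_bh()/ntfs_get_bh(). A sketch of that mapping, assuming record_bits is log2 of the record size (10 for the common 1 KiB records):

```c
#include <stdint.h>

/* Sketch only: byte offset of MFT record "rno" inside the $MFT data
 * stream, as computed in mi_read()/mi_format_new(). */
static uint64_t mft_record_vbo(uint64_t rno, unsigned int record_bits)
{
	return rno << record_bits;      /* vbo = (u64)rno << record_bits */
}

/* Example: with 1 KiB records (record_bits == 10), record 5 starts at
 * byte offset 5120 of the MFT. */
```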
|
| ntfs_fs.h |
| 460 | int attr_collapse_range(struct ntfs_inode *ni, u64 vbo, u64 bytes); |
| 461 | int attr_insert_range(struct ntfs_inode *ni, u64 vbo, u64 bytes); |
| 462 | int attr_punch_hole(struct ntfs_inode *ni, u64 vbo, u64 bytes, u32 *frame_size); |
| 569 | __u64 vbo, __u64 len); |
| 623 | u64 vbo, const void *buf, size_t bytes, int sync); |
| 625 | const struct runs_tree *run, u64 vbo); |
| 627 | u64 vbo, void *buf, u32 bytes, struct ntfs_buffers *nb); |
| 628 | int ntfs_read_bh(struct ntfs_sb_info *sbi, const struct runs_tree *run, u64 vbo, |
| 631 | int ntfs_get_bh(struct ntfs_sb_info *sbi, const struct runs_tree *run, u64 vbo, |
| 636 | struct page **pages, u32 nr_pages, u64 vbo, u32 bytes, |
| [all …] |
|
| inode.c |
| 601 | vcn = vbo >> cluster_bits; | in ntfs_get_block_vbo() |
| 602 | off = vbo & sbi->cluster_mask; | in ntfs_get_block_vbo() |
| 638 | if (vbo >= valid) | in ntfs_get_block_vbo() |
| 645 | if (vbo >= valid) | in ntfs_get_block_vbo() |
| 648 | if (vbo + bytes > valid) { | in ntfs_get_block_vbo() |
| 649 | ni->i_valid = vbo + bytes; | in ntfs_get_block_vbo() |
| 652 | } else if (vbo >= valid) { | in ntfs_get_block_vbo() |
| 667 | u32 voff = valid - vbo; | in ntfs_get_block_vbo() |
| 796 | loff_t vbo = iocb->ki_pos; | in ntfs_direct_IO() (local) |
| 818 | end = vbo + ret; | in ntfs_direct_IO() |
| [all …] |
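
ntfs_get_block_vbo() combines the two ideas that run through this listing: the vbo is split into a cluster number and in-cluster offset (lines 601/602), and it is compared against the valid data size. The hits suggest that a write extending past `valid` advances i_valid to `vbo + bytes`, while a read past `valid` only has `voff = valid - vbo` bytes of real data and the rest reads as zeroes. A sketch of that valid-size bookkeeping, with hypothetical names and plain C types rather than the kernel structures:

```c
#include <stdint.h>
#include <stdbool.h>

/*
 * Sketch only: given a mapped range [vbo, vbo + bytes) and the current
 * valid data size, decide how much of it holds real data and whether a
 * write should advance the valid size (mirrors the checks around lines
 * 638-667 of ntfs_get_block_vbo() in the listing above).
 */
static uint32_t valid_bytes_in_range(uint64_t vbo, uint32_t bytes,
				     uint64_t *valid, bool write)
{
	if (write) {
		if (vbo + bytes > *valid)
			*valid = vbo + bytes;    /* ni->i_valid = vbo + bytes */
		return bytes;
	}
	if (vbo >= *valid)
		return 0;                        /* wholly past valid: zeroes */
	if (vbo + bytes > *valid)
		return (uint32_t)(*valid - vbo); /* voff = valid - vbo */
	return bytes;
}
```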
|