Lines matching refs: asize
2672 u32 asize = le32_to_cpu(attr->size); in check_attr() local
2678 if (asize >= sbi->record_size || in check_attr()
2679 asize + PtrOffset(rec, attr) >= sbi->record_size || in check_attr()
2682 asize)) { in check_attr()
2690 if (rsize >= asize || in check_attr()
2691 le16_to_cpu(attr->res.data_off) + rsize > asize) { in check_attr()
2702 if (svcn > evcn + 1 || run_off >= asize || in check_attr()
2708 if (run_off > asize) in check_attr()
2712 Add2Ptr(attr, run_off), asize - run_off) < 0) { in check_attr()
2725 attr, le16_to_cpu(attr->res.data_off))) > asize) { in check_attr()
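The check_attr() lines above appear to come from the Linux NTFS3 driver's log-replay code; they bound-check one attribute header against its MFT record: the attribute's total size (asize) must fit inside the record, a resident value (rsize at data_off) must fit inside asize, and a non-resident attribute's run list (at run_off) must start inside asize. Below is a minimal, host-side sketch of that pattern; struct attr_hdr and ptr_offset() are simplified stand-ins, not the real on-disk ATTRIB layout or kernel helpers.

#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>

/* Simplified stand-in for the on-disk attribute header (hypothetical). */
struct attr_hdr {
        uint32_t size;      /* asize: total bytes of this attribute in the record */
        uint8_t  non_res;   /* 0 = resident value, 1 = non-resident (run list) */
        uint16_t data_off;  /* resident: offset of the value inside the attribute */
        uint32_t data_size; /* resident: rsize, bytes of the value */
        uint16_t run_off;   /* non-resident: offset of the run list */
};

/* PtrOffset()-style helper: byte offset of p inside base. */
static size_t ptr_offset(const void *base, const void *p)
{
        return (size_t)((const uint8_t *)p - (const uint8_t *)base);
}

static bool attr_fits_record(const void *rec, const struct attr_hdr *attr,
                             uint32_t record_size)
{
        uint32_t asize = attr->size;

        /* The attribute itself must lie fully inside the MFT record. */
        if (asize >= record_size ||
            asize + ptr_offset(rec, attr) >= record_size)
                return false;

        if (!attr->non_res) {
                /* A resident value must fit inside the attribute. */
                if (attr->data_size >= asize ||
                    (uint32_t)attr->data_off + attr->data_size > asize)
                        return false;
        } else {
                /* The run list must start inside the attribute. */
                if (attr->run_off > asize)
                        return false;
        }
        return true;
}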
2819 u32 asize; in check_if_attr() local
2824 asize = le32_to_cpu(attr->size); in check_if_attr()
2825 if (!asize) in check_if_attr()
2828 o += asize; in check_if_attr()
2829 attr = Add2Ptr(attr, asize); in check_if_attr()
2843 u32 asize; in check_if_index_root() local
2848 asize = le32_to_cpu(attr->size); in check_if_index_root()
2849 if (!asize) in check_if_index_root()
2852 o += asize; in check_if_index_root()
2853 attr = Add2Ptr(attr, asize); in check_if_index_root()
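check_if_attr() and check_if_index_root() both walk the attributes packed into a record by adding each attribute's asize to a running offset and to the attribute pointer; a zero asize terminates the loop so a corrupt record cannot spin it forever. A simplified sketch of that walk, with placeholder types and add2ptr() standing in for the kernel's Add2Ptr():

#include <stddef.h>
#include <stdint.h>

/* Placeholder attribute header; only the size field matters for the walk. */
struct attr_hdr {
        uint32_t type;
        uint32_t size; /* asize: bytes occupied by this attribute */
};

/* Add2Ptr()-style helper: advance a pointer by a byte count. */
static void *add2ptr(void *p, size_t off)
{
        return (uint8_t *)p + off;
}

/* Visit every attribute packed between 'start_off' and 'used' bytes. */
static void walk_attrs(void *first, uint32_t start_off, uint32_t used)
{
        struct attr_hdr *attr = first;
        uint32_t o = start_off;

        while (o < used) {
                uint32_t asize = attr->size;

                if (!asize)
                        break; /* zero size would loop forever: bail out */

                /* ... inspect 'attr' here ... */

                o += asize;
                attr = add2ptr(attr, asize);
        }
}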
2867 u32 asize = le32_to_cpu(attr->size); in check_if_root_index() local
2872 if (o >= asize) in check_if_root_index()
2915 u32 asize = le32_to_cpu(attr->size); in change_attr_size() local
2916 int dsize = nsize - asize; in change_attr_size()
2917 u8 *next = Add2Ptr(attr, asize); in change_attr_size()
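change_attr_size() resizes one attribute in place: dsize is the signed difference between the new and old size, next points just past the old attribute, and the record tail is shifted by dsize before the sizes are updated. A sketch of that move follows, again with simplified placeholder types and assuming the caller has already verified that a grow still fits in the record.

#include <stdint.h>
#include <string.h>

/* Placeholder attribute header (not the real ATTRIB layout). */
struct attr_hdr {
        uint32_t type;
        uint32_t size; /* asize */
};

/* Resize the attribute at 'attr' (inside record 'rec') to 'nsize' bytes. */
static void change_attr_size_sketch(uint8_t *rec, uint32_t *used,
                                    struct attr_hdr *attr, uint32_t nsize)
{
        uint32_t asize = attr->size;
        int dsize = (int)nsize - (int)asize;            /* grow (>0) or shrink (<0) */
        uint8_t *next = (uint8_t *)attr + asize;        /* first byte after the attribute */
        uint32_t tail = *used - (uint32_t)(next - rec); /* bytes that have to move */

        /* Shift everything after the attribute by the size delta. */
        memmove(next + dsize, next, tail);

        attr->size = nsize;
        *used += dsize;
}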
2973 u32 asize = name_size + in attr_create_nonres_log() local
2976 attr = kzalloc(asize, GFP_NOFS); in attr_create_nonres_log()
2981 attr->size = cpu_to_le32(asize); in attr_create_nonres_log()
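attr_create_nonres_log() builds a brand-new attribute: asize is the allocation size (the name plus whatever the truncated expression at line 2973 adds), the buffer is zero-allocated with kzalloc(), and the computed size is stored back into the header as little-endian. A rough userspace sketch, with calloc() standing in for kzalloc(..., GFP_NOFS) and a made-up attr_hdr layout:

#include <stdint.h>
#include <stdlib.h>

/* Hypothetical, simplified attribute header. */
struct attr_hdr {
        uint32_t type;
        uint32_t size;     /* asize: total bytes of this attribute */
        uint8_t  non_res;
        uint8_t  name_len;
};

/* Allocate a zeroed attribute big enough for the header plus its name. */
static struct attr_hdr *attr_create_sketch(uint32_t name_size)
{
        uint32_t asize = (uint32_t)sizeof(struct attr_hdr) + name_size;
        struct attr_hdr *attr = calloc(1, asize);

        if (!attr)
                return NULL;

        attr->size = asize;   /* cpu_to_le32(asize) in the kernel source */
        attr->non_res = 1;
        return attr;
}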
3032 u32 nsize, t32, asize, used, esize, off, bits; in do_action() local
3213 asize = le32_to_cpu(attr2->size); in do_action()
3217 !IS_ALIGNED(asize, 8) || in do_action()
3218 Add2Ptr(attr2, asize) > Add2Ptr(lrh, rec_len) || in do_action()
3223 memmove(Add2Ptr(attr, asize), attr, used - roff); in do_action()
3224 memcpy(attr, attr2, asize); in do_action()
3226 rec->used = cpu_to_le32(used + asize); in do_action()
3249 asize = le32_to_cpu(attr->size); in do_action()
3255 rec->used = cpu_to_le32(used - asize); in do_action()
3259 memmove(attr, Add2Ptr(attr, asize), used - asize - roff); in do_action()
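In do_action(), replaying an insert-attribute record first sanity-checks the logged attribute attr2 (an 8-byte-aligned asize that stays inside the log record), then shifts the record tail right by asize, copies attr2 in, and grows used; the delete path is the mirror image, sliding the tail left over the attribute and shrinking used. A simplified sketch of both steps, with placeholder types and without the log-record bound check:

#include <stdbool.h>
#include <stdint.h>
#include <string.h>

/* Placeholder attribute header. */
struct attr_hdr {
        uint32_t type;
        uint32_t size; /* asize */
};

/* Insert 'attr2' (attr2->size bytes) at offset 'roff' of a 'used'-byte record. */
static bool insert_attr(uint8_t *rec, uint32_t *used, uint32_t record_size,
                        uint32_t roff, const struct attr_hdr *attr2)
{
        uint32_t asize = attr2->size;

        /* Mirror the replay checks: 8-byte alignment and room in the record. */
        if (asize % 8 || *used + asize > record_size)
                return false;

        /* Open a hole of asize bytes, then copy the logged attribute into it. */
        memmove(rec + roff + asize, rec + roff, *used - roff);
        memcpy(rec + roff, attr2, asize);
        *used += asize;
        return true;
}

/* Remove the attribute at offset 'roff': the tail slides left over it. */
static void remove_attr(uint8_t *rec, uint32_t *used, uint32_t roff)
{
        uint32_t asize = ((struct attr_hdr *)(rec + roff))->size;

        memmove(rec + roff, rec + roff + asize, *used - asize - roff);
        *used -= asize;
}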
3270 asize = le32_to_cpu(attr->size); in do_action()
3274 if (nsize > asize) in do_action()
3279 if (nsize > asize && nsize - asize > record_size - used) in do_action()
3285 if (nsize < asize) { in do_action()
3290 memmove(Add2Ptr(attr, nsize), Add2Ptr(attr, asize), in do_action()
3291 used - le16_to_cpu(lrh->record_off) - asize); in do_action()
3293 rec->used = cpu_to_le32(used + nsize - asize); in do_action()
3318 asize = le32_to_cpu(attr->size); in do_action()
3322 aoff < le16_to_cpu(attr->nres.run_off) || aoff > asize || in do_action()
3323 (nsize > asize && nsize - asize > record_size - used)) { in do_action()
3329 memmove(Add2Ptr(attr, nsize), Add2Ptr(attr, asize), in do_action()
3330 used - le16_to_cpu(lrh->record_off) - asize); in do_action()
3331 rec->used = cpu_to_le32(used + nsize - asize); in do_action()
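The last two groups of do_action() lines share one resize pattern: compute the attribute's new size nsize, reject the log record if the target offset aoff lies outside the attribute or if growing by nsize - asize would not fit in the record's free space (record_size - used), then shift the tail and set used to used + nsize - asize. A sketch of that guarded resize, with placeholder types:

#include <stdbool.h>
#include <stdint.h>
#include <string.h>

/* Placeholder attribute header. */
struct attr_hdr {
        uint32_t type;
        uint32_t size; /* asize */
};

/* Resize the attribute at record offset 'roff' to 'nsize' bytes, guarding
 * against a corrupt log record first. 'aoff' is the logged target offset. */
static bool replay_resize(uint8_t *rec, uint32_t *used, uint32_t record_size,
                          uint32_t roff, uint32_t aoff, uint32_t nsize)
{
        struct attr_hdr *attr = (struct attr_hdr *)(rec + roff);
        uint32_t asize = attr->size;

        /* Offset past the attribute, or growth that cannot fit: reject. */
        if (aoff > asize ||
            (nsize > asize && nsize - asize > record_size - *used))
                return false;

        /* Shift the rest of the record so the attribute occupies nsize bytes. */
        memmove(rec + roff + nsize, rec + roff + asize, *used - roff - asize);
        attr->size = nsize;
        *used = *used + nsize - asize;

        /* ... the logged data would then land at rec + roff + aoff ... */
        return true;
}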