Lines matching references to 'pe' (struct dm_snap_pending_exception) in drivers/md/dm-snap.c
744 struct dm_snap_pending_exception *pe = mempool_alloc(&s->pending_pool, in alloc_pending_exception() local
748 pe->snap = s; in alloc_pending_exception()
750 return pe; in alloc_pending_exception()
753 static void free_pending_exception(struct dm_snap_pending_exception *pe) in free_pending_exception() argument
755 struct dm_snapshot *s = pe->snap; in free_pending_exception()
757 mempool_free(pe, &s->pending_pool); in free_pending_exception()
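
alloc_pending_exception() takes a pending exception from the snapshot's mempool and records the owning snapshot in pe->snap, which is all free_pending_exception() needs to return the object to the right pool. A userspace sketch of that pattern follows; the bounded spare list is an illustrative stand-in for the kernel mempool, not its implementation.

#include <stdlib.h>

#define POOL_MIN 16   /* illustrative reserve size */

struct dm_snapshot;

/* Stand-in for struct dm_snap_pending_exception. */
struct pending_exception {
    struct dm_snapshot *snap;   /* back-pointer used by free */
};

/* Very small stand-in for a mempool: a bounded LIFO of spare objects. */
struct dm_snapshot {
    struct pending_exception *spare[POOL_MIN];
    int nr_spare;
};

struct pending_exception *alloc_pending_exception(struct dm_snapshot *s)
{
    struct pending_exception *pe;

    /* Prefer a reserved object, fall back to the heap otherwise. */
    if (s->nr_spare)
        pe = s->spare[--s->nr_spare];
    else
        pe = malloc(sizeof(*pe));

    if (pe)
        pe->snap = s;            /* as in the listing: pe->snap = s */
    return pe;
}

void free_pending_exception(struct pending_exception *pe)
{
    struct dm_snapshot *s = pe->snap;   /* only the object is needed */

    if (s->nr_spare < POOL_MIN)
        s->spare[s->nr_spare++] = pe;   /* refill the reserve */
    else
        free(pe);
}

Storing the back-pointer at allocation time is what lets every later caller free a pe without knowing which snapshot it belongs to.
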
1645 struct dm_snap_pending_exception *pe = context; in pending_complete() local
1647 struct dm_snapshot *s = pe->snap; in pending_complete()
1654 dm_exception_table_lock_init(s, pe->e.old_chunk, &lock); in pending_complete()
1673 *e = pe->e; in pending_complete()
1696 if (__chunk_is_tracked(s, pe->e.old_chunk)) { in pending_complete()
1698 __check_for_conflicting_io(s, pe->e.old_chunk); in pending_complete()
1704 dm_remove_exception(&pe->e); in pending_complete()
1708 snapshot_bios = bio_list_get(&pe->snapshot_bios); in pending_complete()
1709 origin_bios = bio_list_get(&pe->origin_bios); in pending_complete()
1710 full_bio = pe->full_bio; in pending_complete()
1712 full_bio->bi_end_io = pe->full_bio_end_io; in pending_complete()
1728 free_pending_exception(pe); in pending_complete()
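
pending_complete() runs once the new chunk has been committed to the exception store: under the exception table lock it turns pe->e into a committed exception, removes the pending entry, detaches the queued snapshot and origin bios plus the optional full_bio, then resubmits that I/O and frees the pe. A minimal sketch of the detach-resume-release ordering, with a plain linked list standing in for struct bio_list; the locking and the committed exception table are left out.

#include <stddef.h>
#include <stdlib.h>

/* A waiter queue plays the role of pe->snapshot_bios / pe->origin_bios. */
struct waiter {
    struct waiter *next;
};

struct pending {
    struct waiter *waiters;   /* I/O queued while the chunk copy ran */
};

/* Placeholder: the kernel resubmits or errors the bio here. */
static void resume_waiter(struct waiter *w, int error)
{
    (void)w;
    (void)error;
}

void pending_complete(struct pending *pe, int error)
{
    /* Detach the waiters first, as bio_list_get() does in the listing:
     * after this point nothing else will look at pe's lists. */
    struct waiter *list = pe->waiters;
    pe->waiters = NULL;

    /* Resume the deferred I/O with the copy's outcome... */
    while (list) {
        struct waiter *next = list->next;
        resume_waiter(list, error);
        list = next;
    }

    /* ...and only then release the pending record. */
    free(pe);
}
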
1731 static void complete_exception(struct dm_snap_pending_exception *pe) in complete_exception() argument
1733 struct dm_snapshot *s = pe->snap; in complete_exception()
1736 s->store->type->commit_exception(s->store, &pe->e, !pe->copy_error, in complete_exception()
1737 pending_complete, pe); in complete_exception()
1746 struct dm_snap_pending_exception *pe = context; in copy_callback() local
1747 struct dm_snapshot *s = pe->snap; in copy_callback()
1749 pe->copy_error = read_err || write_err; in copy_callback()
1751 if (pe->exception_sequence == s->exception_complete_sequence) { in copy_callback()
1755 complete_exception(pe); in copy_callback()
1759 pe = rb_entry(next, struct dm_snap_pending_exception, in copy_callback()
1761 if (pe->exception_sequence != s->exception_complete_sequence) in copy_callback()
1765 rb_erase(&pe->out_of_order_node, &s->out_of_order_tree); in copy_callback()
1766 complete_exception(pe); in copy_callback()
1778 BUG_ON(pe->exception_sequence == pe2->exception_sequence); in copy_callback()
1779 if (pe->exception_sequence < pe2->exception_sequence) in copy_callback()
1785 rb_link_node(&pe->out_of_order_node, parent, p); in copy_callback()
1786 rb_insert_color(&pe->out_of_order_node, &s->out_of_order_tree); in copy_callback()
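
copy_callback() makes exception commits happen in the order the copies were started: each pe carries the exception_sequence assigned in __insert_pending_exception(), and s->exception_complete_sequence names the only sequence allowed to complete next. A matching completion is committed at once and any parked successors are drained; an early completion is parked in the out_of_order rbtree keyed by sequence (the BUG_ON guards against duplicate sequences). The sketch below shows the same discipline with a sorted singly linked list instead of an rbtree; everything apart from the two sequence counters is illustrative.

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

struct pending {
    uint64_t seq;            /* exception_sequence in the listing */
    struct pending *next;    /* sorted parking list (rbtree in the kernel) */
};

struct snapshot {
    uint64_t complete_seq;   /* exception_complete_sequence */
    struct pending *parked;  /* completions that arrived early, sorted by seq */
};

static void commit(struct pending *pe)
{
    printf("committing seq %llu\n", (unsigned long long)pe->seq);
    free(pe);
}

/* Called when the chunk copy for 'pe' finishes, possibly out of order. */
void copy_done(struct snapshot *s, struct pending *pe)
{
    if (pe->seq == s->complete_seq) {
        /* In order: commit it, then drain any parked successors that are
         * now next in line (the rb_first()/rb_erase() loop in the listing). */
        s->complete_seq++;
        commit(pe);

        while (s->parked && s->parked->seq == s->complete_seq) {
            struct pending *next = s->parked;
            s->parked = next->next;
            s->complete_seq++;
            commit(next);
        }
        return;
    }

    /* Out of order: park it, sorted by sequence number. */
    struct pending **pp = &s->parked;
    while (*pp && (*pp)->seq < pe->seq)
        pp = &(*pp)->next;
    pe->next = *pp;
    *pp = pe;
}

int main(void)
{
    struct snapshot s = { .complete_seq = 0, .parked = NULL };
    uint64_t order[] = { 1, 2, 0, 4, 3 };   /* completions arriving out of order */

    for (size_t i = 0; i < sizeof(order) / sizeof(order[0]); i++) {
        struct pending *pe = malloc(sizeof(*pe));
        pe->seq = order[i];
        pe->next = NULL;
        copy_done(&s, pe);
    }
    return 0;
}

Run as-is, the example commits sequences 0 through 4 in order even though the completions arrive as 1, 2, 0, 4, 3.
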
1794 static void start_copy(struct dm_snap_pending_exception *pe) in start_copy() argument
1796 struct dm_snapshot *s = pe->snap; in start_copy()
1804 src.sector = chunk_to_sector(s->store, pe->e.old_chunk); in start_copy()
1808 dest.sector = chunk_to_sector(s->store, pe->e.new_chunk); in start_copy()
1813 dm_kcopyd_copy(s->kcopyd_client, &src, 1, &dest, 0, copy_callback, pe); in start_copy()
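
start_copy() describes the source (the origin device at old_chunk) and the destination (the COW device at new_chunk), each one chunk long, and hands both to dm_kcopyd_copy() with copy_callback as the asynchronous completion. Below is a small sketch of the region arithmetic that chunk_to_sector() implies, assuming a power-of-two chunk size expressed as a shift; the struct names and device handles are illustrative, not the kernel's dm_io_region.

#include <stdint.h>

typedef uint64_t sector_t;
typedef uint64_t chunk_t;

/* One side of the copy: a device, a start sector and a length in sectors. */
struct io_region {
    int      dev;      /* illustrative device handle */
    sector_t sector;
    sector_t count;
};

struct store {
    unsigned chunk_shift;   /* chunk size is a power of two, in sectors */
    sector_t chunk_size;    /* == 1 << chunk_shift */
};

static sector_t chunk_to_sector(const struct store *st, chunk_t chunk)
{
    return chunk << st->chunk_shift;
}

/* Describe the copy of one chunk from the origin to the COW device. */
void setup_copy(const struct store *st, int origin_dev, int cow_dev,
                chunk_t old_chunk, chunk_t new_chunk,
                struct io_region *src, struct io_region *dest)
{
    src->dev = origin_dev;
    src->sector = chunk_to_sector(st, old_chunk);   /* as in the listing */
    src->count = st->chunk_size;

    dest->dev = cow_dev;
    dest->sector = chunk_to_sector(st, new_chunk);  /* as in the listing */
    dest->count = src->count;
}

The real start_copy() also has to cope with a final partial chunk by clamping the source length to the end of the origin device; the sketch assumes whole chunks throughout.
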
1823 static void start_full_bio(struct dm_snap_pending_exception *pe, in start_full_bio() argument
1826 struct dm_snapshot *s = pe->snap; in start_full_bio()
1829 pe->full_bio = bio; in start_full_bio()
1830 pe->full_bio_end_io = bio->bi_end_io; in start_full_bio()
1834 copy_callback, pe); in start_full_bio()
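
start_full_bio() is used when a single write already covers the whole chunk: instead of a kcopyd copy, the snapshot saves the bio's original completion in pe->full_bio_end_io, substitutes its own, and resubmits the already remapped bio so the write itself performs the copy; pending_complete() later restores the original end_io before ending the bio. A generic sketch of that save-and-interpose completion pattern; the request type and callback signature stand in for struct bio and are not the block layer's API.

#include <stddef.h>

struct request;
typedef void (*end_io_fn)(struct request *);

/* Illustrative request with a completion callback, standing in for a bio. */
struct request {
    end_io_fn end_io;
    void     *private;    /* plays the role of bio->bi_private */
};

struct pending {
    struct request *full_req;       /* pe->full_bio in the listing */
    end_io_fn       saved_end_io;   /* pe->full_bio_end_io */
};

/* The interposed completion: the snapshot layer sees the completion first,
 * then restores and calls the original callback, the way pending_complete()
 * restores full_bio->bi_end_io before ending the bio. */
static void snapshot_end_io(struct request *rq)
{
    struct pending *pe = rq->private;

    rq->end_io = pe->saved_end_io;
    rq->private = NULL;
    if (rq->end_io)
        rq->end_io(rq);
}

void start_full_request(struct pending *pe, struct request *rq)
{
    pe->full_req = rq;
    pe->saved_end_io = rq->end_io;   /* save the caller's completion */
    rq->end_io = snapshot_end_io;    /* substitute the snapshot's own */
    rq->private = pe;
}
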
1861 struct dm_snap_pending_exception *pe, chunk_t chunk) in __insert_pending_exception() argument
1863 pe->e.old_chunk = chunk; in __insert_pending_exception()
1864 bio_list_init(&pe->origin_bios); in __insert_pending_exception()
1865 bio_list_init(&pe->snapshot_bios); in __insert_pending_exception()
1866 pe->started = 0; in __insert_pending_exception()
1867 pe->full_bio = NULL; in __insert_pending_exception()
1870 if (s->store->type->prepare_exception(s->store, &pe->e)) { in __insert_pending_exception()
1872 free_pending_exception(pe); in __insert_pending_exception()
1876 pe->exception_sequence = s->exception_start_sequence++; in __insert_pending_exception()
1879 dm_insert_exception(&s->pending, &pe->e); in __insert_pending_exception()
1881 return pe; in __insert_pending_exception()
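
__insert_pending_exception() fills in the chunk and the empty bio lists, then asks the exception store to reserve a new COW chunk via prepare_exception(); if that fails the pe is freed and NULL is returned, otherwise the pe is stamped with the next exception_start_sequence and inserted into the pending table. A sketch of that reserve-or-bail shape, with a trivial bump allocator standing in for the exception store and the table insert elided.

#include <stdint.h>
#include <stdlib.h>

typedef uint64_t chunk_t;

struct exception {
    chunk_t old_chunk;
    chunk_t new_chunk;
};

struct pending {
    struct exception e;
    uint64_t seq;
    int started;
};

struct snapshot {
    chunk_t  next_free_chunk;   /* stand-in for the exception store */
    chunk_t  nr_chunks;         /* COW chunks available */
    uint64_t start_seq;         /* exception_start_sequence */
};

/* Reserve the next COW chunk; fails when the store is exhausted (the role
 * played by s->store->type->prepare_exception in the listing). */
static int prepare_exception(struct snapshot *s, struct exception *e)
{
    if (s->next_free_chunk >= s->nr_chunks)
        return -1;
    e->new_chunk = s->next_free_chunk++;
    return 0;
}

/* Either the pe ends up registered as pending, or it is freed: the caller
 * never has to clean up after a failure. */
struct pending *insert_pending(struct snapshot *s, struct pending *pe,
                               chunk_t chunk)
{
    pe->e.old_chunk = chunk;
    pe->started = 0;

    if (prepare_exception(s, &pe->e)) {
        free(pe);                /* mirrors free_pending_exception(pe) */
        return NULL;
    }

    pe->seq = s->start_seq++;    /* ordering used later at copy completion */
    /* ...dm_insert_exception(&s->pending, &pe->e) would go here... */
    return pe;
}
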
1894 struct dm_snap_pending_exception *pe, chunk_t chunk) in __find_pending_exception() argument
1900 free_pending_exception(pe); in __find_pending_exception()
1904 return __insert_pending_exception(s, pe, chunk); in __find_pending_exception()
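
__find_pending_exception() exists because the caller allocates the pe with the snapshot lock dropped (allocation may sleep) and must re-check the pending table once the lock is retaken: if another thread inserted an exception for the same chunk in the meantime, the freshly allocated pe is freed and the existing one is used, as the snapshot_map() lines below show. A userspace sketch of the allocate-outside-the-lock, recheck-under-the-lock pattern, using a single slot guarded by a pthread mutex.

#include <pthread.h>
#include <stdlib.h>

struct pending {
    int chunk;
};

static pthread_mutex_t lock = PTHREAD_MUTEX_INITIALIZER;
static struct pending *table_slot;   /* stands in for the pending table */

/* Lookup under the lock: the table may have changed while we allocated. */
static struct pending *lookup_pending(int chunk)
{
    return (table_slot && table_slot->chunk == chunk) ? table_slot : NULL;
}

struct pending *find_pending(int chunk)
{
    struct pending *pe, *existing;

    /* 1. Allocate with the lock dropped: allocation may block. */
    pe = malloc(sizeof(*pe));
    if (!pe)
        return NULL;
    pe->chunk = chunk;

    pthread_mutex_lock(&lock);

    /* 2. Re-check: someone may have raced us while we slept. */
    existing = lookup_pending(chunk);
    if (existing) {
        free(pe);            /* our copy loses; use the winner's */
        pe = existing;
    } else {
        table_slot = pe;     /* we won: publish ours */
    }

    pthread_mutex_unlock(&lock);
    return pe;
}
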
1954 struct dm_snap_pending_exception *pe = NULL; in snapshot_map() local
2032 pe = __lookup_pending_exception(s, chunk); in snapshot_map()
2033 if (!pe) { in snapshot_map()
2035 pe = alloc_pending_exception(s); in snapshot_map()
2040 free_pending_exception(pe); in snapshot_map()
2045 pe = __find_pending_exception(s, pe, chunk); in snapshot_map()
2046 if (!pe) { in snapshot_map()
2066 remap_exception(s, &pe->e, bio, chunk); in snapshot_map()
2070 if (!pe->started && io_overlaps_chunk(s, bio)) { in snapshot_map()
2071 pe->started = 1; in snapshot_map()
2076 start_full_bio(pe, bio); in snapshot_map()
2080 bio_list_add(&pe->snapshot_bios, bio); in snapshot_map()
2082 if (!pe->started) { in snapshot_map()
2084 pe->started = 1; in snapshot_map()
2089 start_copy(pe); in snapshot_map()
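
At this point in snapshot_map() the bio has already been remapped to the new chunk; the remaining decision is how to get the data copied. If the bio covers the whole chunk and the copy has not started, the bio itself becomes the copy via start_full_bio(); otherwise the bio is parked on pe->snapshot_bios and, if nobody has started it yet, a background chunk copy is kicked off with start_copy(). A compact sketch of that decision; io_overlaps_chunk() is modelled here simply as "the write is exactly one chunk long", which is an assumption about the helper rather than a quotation, and the stubs only mark what the real calls would do.

#include <stdbool.h>
#include <stdint.h>

struct bio_stub { uint64_t size_bytes; };

struct pending {
    int started;   /* pe->started in the listing */
};

struct snapshot { uint64_t chunk_bytes; };

/* Assumption for the sketch: the write covers the chunk exactly, so writing
 * it to the COW device is already the full copy. */
static bool io_overlaps_chunk(struct snapshot *s, struct bio_stub *bio)
{
    return bio->size_bytes == s->chunk_bytes;
}

static void start_full_bio_stub(struct pending *pe, struct bio_stub *bio)
{
    (void)pe; (void)bio;   /* would submit the bio to the COW device */
}

static void start_copy_stub(struct pending *pe)
{
    (void)pe;              /* would start a background kcopyd-style copy */
}

static void queue_bio(struct pending *pe, struct bio_stub *bio)
{
    (void)pe; (void)bio;   /* would add bio to pe->snapshot_bios */
}

/* The choice made at the end of snapshot_map() once pe exists and the bio
 * has been remapped to the new chunk. */
void dispatch(struct snapshot *s, struct pending *pe, struct bio_stub *bio)
{
    if (!pe->started && io_overlaps_chunk(s, bio)) {
        /* The write covers the whole chunk: let it serve as the copy. */
        pe->started = 1;
        start_full_bio_stub(pe, bio);
        return;
    }

    /* Park the bio until the copy completes... */
    queue_bio(pe, bio);

    /* ...and kick off the chunk copy if nobody has yet. */
    if (!pe->started) {
        pe->started = 1;
        start_copy_stub(pe);
    }
}
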
2440 struct dm_snap_pending_exception *pe, *pe2; in __origin_write() local
2473 pe = __lookup_pending_exception(snap, chunk); in __origin_write()
2474 if (!pe) { in __origin_write()
2485 pe = alloc_pending_exception(snap); in __origin_write()
2493 free_pending_exception(pe); in __origin_write()
2497 pe = __insert_pending_exception(snap, pe, chunk); in __origin_write()
2498 if (!pe) { in __origin_write()
2506 free_pending_exception(pe); in __origin_write()
2507 pe = pe2; in __origin_write()
2519 bio_list_add(&pe->origin_bios, bio); in __origin_write()
2522 if (!pe->started) { in __origin_write()
2523 pe->started = 1; in __origin_write()
2524 pe_to_start_last = pe; in __origin_write()
2528 if (!pe->started) { in __origin_write()
2529 pe->started = 1; in __origin_write()
2530 pe_to_start_now = pe; in __origin_write()
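
__origin_write() handles writes to the origin device: every snapshot that still shares the written chunk needs a pending exception, the origin bio is queued on one pe's origin_bios list, and the write is deferred until the copy-outs complete; pe_to_start_now and pe_to_start_last only control which copy is kicked off before and after the locks are dropped. A schematic sketch of that copy-before-write gating across several snapshots; the snapshot array, the "already copied" flag and the return values are simplified stand-ins, not the device-mapper API.

#include <stdbool.h>
#include <stddef.h>

enum map_result { MAP_REMAPPED, MAP_SUBMITTED };   /* roughly DM_MAPIO_* */

struct snapshot {
    bool chunk_copied;     /* has this chunk already been copied out? */
    bool copy_started;     /* roughly pe->started for this chunk */
};

struct origin_bio { int deferred; };

/* Trigger the copy-out for one snapshot (start_copy() in the listing). */
static void start_copy_stub(struct snapshot *s)
{
    s->copy_started = true;
}

/* Before a write to an origin chunk may proceed, every snapshot still
 * sharing that chunk must have its copy-out triggered; the write itself
 * is deferred until those copies complete. */
enum map_result origin_write(struct snapshot *snaps, size_t n,
                             struct origin_bio *bio)
{
    bool must_wait = false;

    for (size_t i = 0; i < n; i++) {
        if (snaps[i].chunk_copied)
            continue;                /* this snapshot already has its copy */

        if (!snaps[i].copy_started)
            start_copy_stub(&snaps[i]);

        /* The original write is queued (pe->origin_bios in the listing)
         * and resubmitted only when the copy finishes. */
        must_wait = true;
    }

    if (must_wait) {
        bio->deferred = 1;
        return MAP_SUBMITTED;        /* the write waits for the copies */
    }
    return MAP_REMAPPED;             /* nothing to copy: write goes ahead */
}
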