/*
 * Copyright (c) 2023, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <CppUTest/TestHarness.h>
#include <cstring>
#include <string>

#include "common/uuid/uuid.h"
#include "media/disk/gpt_iterator/gpt_iterator.h"
#include "media/disk/guid.h"
#include "media/volume/block_volume/block_volume.h"
#include "media/volume/index/volume_index.h"
#include "service/block_storage/config/ref/ref_partition_configurator.h"
#include "service/block_storage/factory/ref_ram_gpt/block_store_factory.h"

TEST_GROUP(GptIteratorTests)
{
	void setup()
	{
		volume_index_init();

		/* Create GPT configured block_store using ref partition configuration */
		m_block_store = ref_ram_gpt_block_store_factory_create();
		CHECK_TRUE(m_block_store);

		/* Use partition exposed for accessing the disk header */
		uuid_guid_octets_from_canonical(&m_partition_guid,
						DISK_GUID_UNIQUE_PARTITION_DISK_HEADER);

		m_volume = NULL;

		int status = block_volume_init(&m_block_volume, m_block_store, &m_partition_guid,
					       &m_volume);

		LONGS_EQUAL(0, status);
		CHECK_TRUE(m_volume);

		status = gpt_iterator_init(&m_iter, m_volume);
		LONGS_EQUAL(0, status);
	}

	void teardown()
	{
		gpt_iterator_deinit(&m_iter);
		block_volume_deinit(&m_block_volume);
		ref_ram_gpt_block_store_factory_destroy(m_block_store);
		volume_index_clear();
	}

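	/* Per the UEFI GPT specification, a partition entry is unused when its
	 * partition type GUID is the nil (all-zero) UUID.
	 */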
	bool check_in_use(const gpt_entry_t *entry)
	{
		struct uuid_octets nil_uuid;

		memset(&nil_uuid, 0, sizeof(nil_uuid));
		return (memcmp(nil_uuid.octets, &entry->type_uuid, sizeof(nil_uuid.octets)) != 0);
	}

	static const uint32_t CLIENT_ID = 0;
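	/* Standard GPT layout with 512-byte blocks and a 128-entry partition
	 * table: the protective MBR (LBA 0), primary GPT header (LBA 1) and
	 * 32 blocks of partition entries occupy LBAs 0-33, so LBA 34 is the
	 * first usable block.
	 */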
	static const size_t FIRST_USABLE_LBA = 34;

	struct uuid_octets m_partition_guid;
	struct block_store *m_block_store;
	struct block_volume m_block_volume;
	struct volume *m_volume;
	struct gpt_iterator m_iter;
};

TEST(GptIteratorTests, iterateOverRefGpt)
{
	/* Expect the reference partition configuration to contain 4 partitions */
	struct uuid_octets guid;
	gpt_entry_t gpt_entry;
	int status;

	/* Set iterator to first entry */
	gpt_iterator_first(&m_iter);
	CHECK_FALSE(gpt_iterator_is_done(&m_iter));

	/* Expect to read ref partition 1 */
	status = gpt_iterator_current(&m_iter, &gpt_entry);
	LONGS_EQUAL(0, status);
	CHECK_TRUE(check_in_use(&gpt_entry));
	uuid_guid_octets_from_canonical(&guid, REF_PARTITION_1_GUID);
	MEMCMP_EQUAL(guid.octets, (uint8_t *)&gpt_entry.unique_uuid, sizeof(guid.octets));
	UNSIGNED_LONGS_EQUAL(FIRST_USABLE_LBA + REF_PARTITION_1_STARTING_LBA, gpt_entry.first_lba);
	UNSIGNED_LONGS_EQUAL(FIRST_USABLE_LBA + REF_PARTITION_1_ENDING_LBA, gpt_entry.last_lba);

	/* Iterate to next */
	gpt_iterator_next(&m_iter);
	CHECK_FALSE(gpt_iterator_is_done(&m_iter));

	/* Expect to read ref partition 2 */
	status = gpt_iterator_current(&m_iter, &gpt_entry);
	LONGS_EQUAL(0, status);
	CHECK_TRUE(check_in_use(&gpt_entry));
	uuid_guid_octets_from_canonical(&guid, REF_PARTITION_2_GUID);
	MEMCMP_EQUAL(guid.octets, (uint8_t *)&gpt_entry.unique_uuid, sizeof(guid.octets));
	UNSIGNED_LONGS_EQUAL(FIRST_USABLE_LBA + REF_PARTITION_2_STARTING_LBA, gpt_entry.first_lba);
	UNSIGNED_LONGS_EQUAL(FIRST_USABLE_LBA + REF_PARTITION_2_ENDING_LBA, gpt_entry.last_lba);

	/* Iterate to next */
	gpt_iterator_next(&m_iter);
	CHECK_FALSE(gpt_iterator_is_done(&m_iter));

	/* Expect to read ref partition 3 */
	status = gpt_iterator_current(&m_iter, &gpt_entry);
	LONGS_EQUAL(0, status);
	CHECK_TRUE(check_in_use(&gpt_entry));
	uuid_guid_octets_from_canonical(&guid, REF_PARTITION_3_GUID);
	MEMCMP_EQUAL(guid.octets, (uint8_t *)&gpt_entry.unique_uuid, sizeof(guid.octets));
	UNSIGNED_LONGS_EQUAL(FIRST_USABLE_LBA + REF_PARTITION_3_STARTING_LBA, gpt_entry.first_lba);
	UNSIGNED_LONGS_EQUAL(FIRST_USABLE_LBA + REF_PARTITION_3_ENDING_LBA, gpt_entry.last_lba);

	/* Iterate to next */
	gpt_iterator_next(&m_iter);
	CHECK_FALSE(gpt_iterator_is_done(&m_iter));

	/* Expect to read ref partition 4 */
	status = gpt_iterator_current(&m_iter, &gpt_entry);
	LONGS_EQUAL(0, status);
	CHECK_TRUE(check_in_use(&gpt_entry));
	uuid_guid_octets_from_canonical(&guid, REF_PARTITION_4_GUID);
	MEMCMP_EQUAL(guid.octets, (uint8_t *)&gpt_entry.unique_uuid, sizeof(guid.octets));
	UNSIGNED_LONGS_EQUAL(FIRST_USABLE_LBA + REF_PARTITION_4_STARTING_LBA, gpt_entry.first_lba);
	UNSIGNED_LONGS_EQUAL(FIRST_USABLE_LBA + REF_PARTITION_4_ENDING_LBA, gpt_entry.last_lba);

	/* Don't expect any other entries to be in-use */
	gpt_iterator_next(&m_iter);

	while (!gpt_iterator_is_done(&m_iter)) {
		status = gpt_iterator_current(&m_iter, &gpt_entry);
		LONGS_EQUAL(0, status);
		CHECK_FALSE(check_in_use(&gpt_entry));

		gpt_iterator_next(&m_iter);
	}
}