/*
 * Copyright (c) 2018-2019 Jan Van Winkel <jan.van_winkel@dxplore.eu>
 * Copyright (c) 2025 Abderrahmane JARMOUNI
 *
 * SPDX-License-Identifier: Apache-2.0
 */

#include <zephyr/init.h>
#include <zephyr/kernel.h>
#include <lvgl.h>
#include "lvgl_display.h"
#include "lvgl_common_input.h"
#include "lvgl_zephyr.h"
#ifdef CONFIG_LV_Z_USE_FILESYSTEM
#include "lvgl_fs.h"
#endif
#ifdef CONFIG_LV_Z_MEM_POOL_SYS_HEAP
#include "lvgl_mem.h"
#endif
#include LV_STDLIB_INCLUDE

#include <zephyr/logging/log.h>
LOG_MODULE_REGISTER(lvgl, CONFIG_LV_Z_LOG_LEVEL);

static lv_display_t *lv_displays[DT_ZEPHYR_DISPLAYS_COUNT];
struct lvgl_disp_data disp_data[DT_ZEPHYR_DISPLAYS_COUNT] = {{
	.blanking_on = false,
}};

#define DISPLAY_BUFFER_ALIGN(alignbytes) __aligned(alignbytes)

#if DT_HAS_COMPAT_STATUS_OKAY(zephyr_displays)
#define DISPLAY_NODE(n) DT_ZEPHYR_DISPLAY(n)
#elif DT_HAS_CHOSEN(zephyr_display)
#define DISPLAY_NODE(n) DT_CHOSEN(zephyr_display)
#else
#error Could not find "zephyr,display" chosen property, or a "zephyr,displays" compatible node in DT
#define DISPLAY_NODE(n) DT_INVALID_NODE
#endif

#define IS_MONOCHROME_DISPLAY                                                                      \
	UTIL_OR(IS_EQ(CONFIG_LV_Z_BITS_PER_PIXEL, 1), IS_EQ(CONFIG_LV_COLOR_DEPTH_1, 1))

#define ALLOC_MONOCHROME_CONV_BUFFER                                                               \
	UTIL_AND(IS_EQ(IS_MONOCHROME_DISPLAY, 1),                                                  \
		 IS_EQ(CONFIG_LV_Z_MONOCHROME_CONVERSION_BUFFER, 1))

#ifdef CONFIG_LV_Z_BUFFER_ALLOC_STATIC

#define DISPLAY_WIDTH(n)  DT_PROP(DISPLAY_NODE(n), width)
#define DISPLAY_HEIGHT(n) DT_PROP(DISPLAY_NODE(n), height)

#if IS_MONOCHROME_DISPLAY
/* monochrome buffers are expected to have 8 preceding bytes for the color palette */
#define BUFFER_SIZE(n)                                                                             \
	(((CONFIG_LV_Z_VDB_SIZE * ROUND_UP(DISPLAY_WIDTH(n), 8) *                                  \
	   ROUND_UP(DISPLAY_HEIGHT(n), 8)) /                                                       \
	  100) / 8 +                                                                               \
	 8)
#else
#define BUFFER_SIZE(n)                                                                             \
	(CONFIG_LV_Z_BITS_PER_PIXEL *                                                              \
	 ((CONFIG_LV_Z_VDB_SIZE * DISPLAY_WIDTH(n) * DISPLAY_HEIGHT(n)) / 100) / 8)
#endif /* IS_MONOCHROME_DISPLAY */
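
/*
 * Worked example (illustrative only, not derived from this build's Kconfig): for
 * a hypothetical 320x240 display with CONFIG_LV_Z_BITS_PER_PIXEL=16 and
 * CONFIG_LV_Z_VDB_SIZE=16 (the draw buffer covers 16% of the screen), the
 * non-monochrome formula yields
 *   16 * ((16 * 320 * 240) / 100) / 8 = 16 * 12288 / 8 = 24576 bytes.
 * The monochrome variant packs 1 bit per pixel instead and adds the 8 palette
 * bytes noted above.
 */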

static uint32_t disp_buf_size[DT_ZEPHYR_DISPLAYS_COUNT] = {0};
static uint8_t *buf0_p[DT_ZEPHYR_DISPLAYS_COUNT] = {NULL};

#ifdef CONFIG_LV_Z_DOUBLE_VDB
static uint8_t *buf1_p[DT_ZEPHYR_DISPLAYS_COUNT] = {NULL};
#endif

#if ALLOC_MONOCHROME_CONV_BUFFER
static uint8_t *mono_vtile_buf_p[DT_ZEPHYR_DISPLAYS_COUNT] = {NULL};
#endif

/* NOTE: depending on the chosen color depth, buffers may be accessed as uint8_t *, */
/* uint16_t * or uint32_t *, so each buffer must be aligned accordingly to */
/* prevent unaligned memory accesses. */

/* clang-format off */
#define LV_BUFFERS_DEFINE(n)									\
	static DISPLAY_BUFFER_ALIGN(LV_DRAW_BUF_ALIGN) uint8_t buf0_##n[BUFFER_SIZE(n)]		\
	IF_ENABLED(CONFIG_LV_Z_VDB_CUSTOM_SECTION, (Z_GENERIC_SECTION(.lvgl_buf)))		\
						       __aligned(CONFIG_LV_Z_VDB_ALIGN);	\
												\
	IF_ENABLED(CONFIG_LV_Z_DOUBLE_VDB, (							\
	static DISPLAY_BUFFER_ALIGN(LV_DRAW_BUF_ALIGN) uint8_t buf1_##n[BUFFER_SIZE(n)]		\
	IF_ENABLED(CONFIG_LV_Z_VDB_CUSTOM_SECTION, (Z_GENERIC_SECTION(.lvgl_buf)))		\
			__aligned(CONFIG_LV_Z_VDB_ALIGN);					\
	))											\
												\
	IF_ENABLED(ALLOC_MONOCHROME_CONV_BUFFER, (						\
	static uint8_t mono_vtile_buf_##n[BUFFER_SIZE(n)]					\
	IF_ENABLED(CONFIG_LV_Z_VDB_CUSTOM_SECTION, (Z_GENERIC_SECTION(.lvgl_buf)))		\
			__aligned(CONFIG_LV_Z_VDB_ALIGN);					\
	))

FOR_EACH(LV_BUFFERS_DEFINE, (), LV_DISPLAYS_IDX_LIST);
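
/*
 * Sketch of the expansion (not literal preprocessor output): with
 * CONFIG_LV_Z_DOUBLE_VDB, CONFIG_LV_Z_VDB_CUSTOM_SECTION and the monochrome
 * conversion buffer all disabled, LV_BUFFERS_DEFINE(0) reduces to roughly
 *
 *   static __aligned(LV_DRAW_BUF_ALIGN) uint8_t buf0_0[BUFFER_SIZE(0)]
 *           __aligned(CONFIG_LV_Z_VDB_ALIGN);
 *
 * i.e. one statically allocated draw buffer per enumerated display index.
 */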

#define LV_BUFFERS_REFERENCES(n)                                                                   \
	disp_buf_size[n] = (uint32_t)BUFFER_SIZE(n);                                               \
	buf0_p[n] = buf0_##n;                                                                      \
	IF_ENABLED(CONFIG_LV_Z_DOUBLE_VDB, (buf1_p[n] = buf1_##n;))                                \
	IF_ENABLED(ALLOC_MONOCHROME_CONV_BUFFER, (mono_vtile_buf_p[n] = mono_vtile_buf_##n;))
/* clang-format on */

#endif /* CONFIG_LV_Z_BUFFER_ALLOC_STATIC */

#if CONFIG_LV_Z_LOG_LEVEL != 0
static void lvgl_log(lv_log_level_t level, const char *buf)
{
	/*
	 * LVGL prepends a textual level tag ("[Error] ", "[Warn] ", ...) to the
	 * message; skip it here so the Zephyr logger does not print the level twice.
	 */
	switch (level) {
	case LV_LOG_LEVEL_ERROR:
		LOG_ERR("%s", buf + (sizeof("[Error] ") - 1));
		break;
	case LV_LOG_LEVEL_WARN:
		LOG_WRN("%s", buf + (sizeof("[Warn] ") - 1));
		break;
	case LV_LOG_LEVEL_INFO:
		LOG_INF("%s", buf + (sizeof("[Info] ") - 1));
		break;
	case LV_LOG_LEVEL_TRACE:
		LOG_DBG("%s", buf + (sizeof("[Trace] ") - 1));
		break;
	case LV_LOG_LEVEL_USER:
		LOG_INF("%s", buf + (sizeof("[User] ") - 1));
		break;
	}
}
#endif

#ifdef CONFIG_LV_Z_BUFFER_ALLOC_STATIC

static void lvgl_allocate_rendering_buffers_static(lv_display_t *display, int disp_idx)
{
#ifdef CONFIG_LV_Z_DOUBLE_VDB
	lv_display_set_buffers(display, buf0_p[disp_idx], buf1_p[disp_idx], disp_buf_size[disp_idx],
			       LV_DISPLAY_RENDER_MODE_PARTIAL);
#else
	lv_display_set_buffers(display, buf0_p[disp_idx], NULL, disp_buf_size[disp_idx],
			       LV_DISPLAY_RENDER_MODE_PARTIAL);
#endif /* CONFIG_LV_Z_DOUBLE_VDB */

#if ALLOC_MONOCHROME_CONV_BUFFER
	lvgl_set_mono_conversion_buffer(mono_vtile_buf_p[disp_idx], disp_buf_size[disp_idx]);
#endif
}

#else

static int lvgl_allocate_rendering_buffers(lv_display_t *display)
{
	void *buf0 = NULL;
	void *buf1 = NULL;
	uint16_t buf_nbr_pixels;
	uint32_t buf_size;
	struct lvgl_disp_data *data = (struct lvgl_disp_data *)lv_display_get_user_data(display);
	uint16_t hor_res = lv_display_get_horizontal_resolution(display);
	uint16_t ver_res = lv_display_get_vertical_resolution(display);

	buf_nbr_pixels = (CONFIG_LV_Z_VDB_SIZE * hor_res * ver_res) / 100;
	/* one horizontal line is the minimum buffer requirement for lvgl */
	if (buf_nbr_pixels < hor_res) {
		buf_nbr_pixels = hor_res;
	}

	switch (data->cap.current_pixel_format) {
	case PIXEL_FORMAT_ARGB_8888:
		buf_size = 4 * buf_nbr_pixels;
		break;
	case PIXEL_FORMAT_RGB_888:
		buf_size = 3 * buf_nbr_pixels;
		break;
	case PIXEL_FORMAT_RGB_565:
		buf_size = 2 * buf_nbr_pixels;
		break;
	case PIXEL_FORMAT_L_8:
		buf_size = buf_nbr_pixels;
		break;
	case PIXEL_FORMAT_MONO01:
	case PIXEL_FORMAT_MONO10:
		/* 1 bit per pixel, rounded up to a whole byte, plus 8 bytes reserved
		 * for the monochrome color palette.
		 */
		buf_size = buf_nbr_pixels / 8 + 8;
		buf_size += (buf_nbr_pixels % 8) == 0 ? 0 : 1;
		break;
	default:
		return -ENOTSUP;
	}

	buf0 = lv_malloc(buf_size);
	if (buf0 == NULL) {
		LOG_ERR("Failed to allocate memory for rendering buffer");
		return -ENOMEM;
	}

#ifdef CONFIG_LV_Z_DOUBLE_VDB
	buf1 = lv_malloc(buf_size);
	if (buf1 == NULL) {
		lv_free(buf0);
		LOG_ERR("Failed to allocate memory for rendering buffer");
		return -ENOMEM;
	}
#endif

#if ALLOC_MONOCHROME_CONV_BUFFER
	void *vtile_buf = lv_malloc(buf_size);

	if (vtile_buf == NULL) {
		lv_free(buf0);
		lv_free(buf1);
		LOG_ERR("Failed to allocate memory for vtile buffer");
		return -ENOMEM;
	}
	lvgl_set_mono_conversion_buffer(vtile_buf, buf_size);
#endif /* ALLOC_MONOCHROME_CONV_BUFFER */

	lv_display_set_buffers(display, buf0, buf1, buf_size, LV_DISPLAY_RENDER_MODE_PARTIAL);
	return 0;
}
#endif /* CONFIG_LV_Z_BUFFER_ALLOC_STATIC */

#ifdef CONFIG_LV_Z_RUN_LVGL_ON_WORKQUEUE

static K_THREAD_STACK_DEFINE(lvgl_workqueue_stack, CONFIG_LV_Z_LVGL_WORKQUEUE_STACK_SIZE);
static struct k_work_q lvgl_workqueue;

static void lvgl_timer_handler_work(struct k_work *work)
{
	struct k_work_delayable *dwork = k_work_delayable_from_work(work);
	uint32_t wait_time;

	lvgl_lock();
	wait_time = lv_timer_handler();
	lvgl_unlock();

	/* Schedule the next lv_timer_handler() run; if no timer is ready, fall back
	 * to the default refresh period.
	 */
	if (wait_time == LV_NO_TIMER_READY) {
		wait_time = CONFIG_LV_DEF_REFR_PERIOD;
	}

	k_work_schedule_for_queue(&lvgl_workqueue, dwork, K_MSEC(wait_time));
}
static K_WORK_DELAYABLE_DEFINE(lvgl_work, lvgl_timer_handler_work);

struct k_work_q *lvgl_get_workqueue(void)
{
	return &lvgl_workqueue;
}
#endif /* CONFIG_LV_Z_RUN_LVGL_ON_WORKQUEUE */
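
/*
 * Usage sketch (hypothetical application code, not part of this module): work
 * items that touch LVGL objects can be queued on this workqueue so they run on
 * the same thread as lv_timer_handler(), e.g.
 *
 *   static void ui_update(struct k_work *work)
 *   {
 *           lv_label_set_text(status_label, "updated");
 *   }
 *   static K_WORK_DEFINE(ui_work, ui_update);
 *   ...
 *   k_work_submit_to_queue(lvgl_get_workqueue(), &ui_work);
 *
 * ui_update, ui_work and status_label are placeholder names.
 */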

#if defined(CONFIG_LV_Z_LVGL_MUTEX) && !defined(CONFIG_LV_Z_USE_OSAL)

static K_MUTEX_DEFINE(lvgl_mutex);

void lvgl_lock(void)
{
	(void)k_mutex_lock(&lvgl_mutex, K_FOREVER);
}

bool lvgl_trylock(void)
{
	return k_mutex_lock(&lvgl_mutex, K_NO_WAIT) == 0;
}

void lvgl_unlock(void)
{
	(void)k_mutex_unlock(&lvgl_mutex);
}

#endif /* CONFIG_LV_Z_LVGL_MUTEX */
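
/*
 * Usage sketch (hypothetical application code, not part of this module): any
 * thread other than the one running lv_timer_handler() should wrap LVGL calls
 * with this lock, e.g.
 *
 *   lvgl_lock();
 *   lv_obj_t *btn = lv_button_create(lv_screen_active());
 *   lv_obj_center(btn);
 *   lvgl_unlock();
 *
 * lvgl_trylock() is the non-blocking variant; it returns false when the mutex
 * is already held.
 */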

void lv_mem_init(void)
{
#ifdef CONFIG_LV_Z_MEM_POOL_SYS_HEAP
	lvgl_heap_init();
#endif /* CONFIG_LV_Z_MEM_POOL_SYS_HEAP */
}

void lv_mem_deinit(void)
{
	/* Reinitializing the heap clears all allocations, no action needed */
}

void lv_mem_monitor_core(lv_mem_monitor_t *mon_p)
{
	memset(mon_p, 0, sizeof(lv_mem_monitor_t));

#if CONFIG_LV_Z_MEM_POOL_SYS_HEAP
	struct sys_memory_stats stats;

	lvgl_heap_stats(&stats);
	mon_p->used_pct =
		(stats.allocated_bytes * 100) / (stats.allocated_bytes + stats.free_bytes);
	mon_p->max_used = stats.max_allocated_bytes;
#else
	LOG_WRN_ONCE("Memory statistics only supported for CONFIG_LV_Z_MEM_POOL_SYS_HEAP");
#endif /* CONFIG_LV_Z_MEM_POOL_SYS_HEAP */
}

lv_result_t lv_mem_test_core(void)
{
	/* Not supported for now */
	return LV_RESULT_OK;
}
#define ENUMERATE_DISPLAY_DEVS(n) display_dev[n] = DEVICE_DT_GET(DISPLAY_NODE(n));

int lvgl_init(void)
{
	const struct device *display_dev[DT_ZEPHYR_DISPLAYS_COUNT];
	struct lvgl_disp_data *p_disp_data;
	int err;

	/* clang-format off */
	FOR_EACH(ENUMERATE_DISPLAY_DEVS, (), LV_DISPLAYS_IDX_LIST);
	/* clang-format on */
	for (int i = 0; i < DT_ZEPHYR_DISPLAYS_COUNT; i++) {
		if (!device_is_ready(display_dev[i])) {
			LOG_ERR("Display device %d is not ready", i);
			return -ENODEV;
		}
	}

	lv_init();
	lv_tick_set_cb(k_uptime_get_32);

#if CONFIG_LV_Z_LOG_LEVEL != 0
	lv_log_register_print_cb(lvgl_log);
#endif

#ifdef CONFIG_LV_Z_USE_FILESYSTEM
	lvgl_fs_init();
#endif

#ifdef CONFIG_LV_Z_BUFFER_ALLOC_STATIC
	/* clang-format off */
	FOR_EACH(LV_BUFFERS_REFERENCES, (), LV_DISPLAYS_IDX_LIST);
	/* clang-format on */
#endif

	for (int i = 0; i < DT_ZEPHYR_DISPLAYS_COUNT; i++) {
		p_disp_data = &disp_data[i];
		p_disp_data->display_dev = display_dev[i];
		display_get_capabilities(display_dev[i], &p_disp_data->cap);

		lv_displays[i] = lv_display_create(p_disp_data->cap.x_resolution,
						   p_disp_data->cap.y_resolution);
		if (!lv_displays[i]) {
			LOG_ERR("Failed to create display %d LV object.", i);
			return -ENOMEM;
		}

		lv_display_set_user_data(lv_displays[i], p_disp_data);
		if (set_lvgl_rendering_cb(lv_displays[i]) != 0) {
			LOG_ERR("Display %d not supported.", i);
			return -ENOTSUP;
		}

#ifdef CONFIG_LV_Z_BUFFER_ALLOC_STATIC
		lvgl_allocate_rendering_buffers_static(lv_displays[i], i);
#else
		err = lvgl_allocate_rendering_buffers(lv_displays[i]);
		if (err < 0) {
			return err;
		}
#endif

#ifdef CONFIG_LV_Z_FULL_REFRESH
		lv_display_set_render_mode(lv_displays[i], LV_DISPLAY_RENDER_MODE_FULL);
#endif
	}

	err = lvgl_init_input_devices();
	if (err < 0) {
		LOG_ERR("Failed to initialize input devices.");
		return err;
	}

#ifdef CONFIG_LV_Z_RUN_LVGL_ON_WORKQUEUE
	const struct k_work_queue_config lvgl_workqueue_cfg = {
		.name = "lvgl",
	};

	k_work_queue_init(&lvgl_workqueue);
	k_work_queue_start(&lvgl_workqueue, lvgl_workqueue_stack,
			   K_THREAD_STACK_SIZEOF(lvgl_workqueue_stack),
			   CONFIG_LV_Z_LVGL_WORKQUEUE_PRIORITY, &lvgl_workqueue_cfg);

	k_work_submit_to_queue(&lvgl_workqueue, &lvgl_work.work);
#endif

	return 0;
}
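
/*
 * When CONFIG_LV_Z_AUTO_INIT is disabled, the application is expected to call
 * lvgl_init() itself before using any LVGL API, e.g. (sketch) early in main():
 *
 *   if (lvgl_init() != 0) {
 *           return -1;
 *   }
 */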

#ifdef CONFIG_LV_Z_AUTO_INIT
SYS_INIT(lvgl_init, APPLICATION, CONFIG_LV_Z_INIT_PRIORITY);
#endif /* CONFIG_LV_Z_AUTO_INIT */