/*
 * Copyright (c) 2022-2023 HPMicro
 * SPDX-License-Identifier: BSD-3-Clause
 */

ENTRY(_start)

STACK_SIZE = _stack_size;
HEAP_SIZE = _heap_size;
UF2_BOOTLOADER_RESERVED_LENGTH = DEFINED(_uf2_bl_length) ? _uf2_bl_length : 0x20000;

MEMORY
{
    XPI0 (rx) : ORIGIN = 0x80000000 + UF2_BOOTLOADER_RESERVED_LENGTH, LENGTH = _flash_size - UF2_BOOTLOADER_RESERVED_LENGTH
    ILM (wx) : ORIGIN = 0x00000000, LENGTH = 256K
    DLM (w) : ORIGIN = 0x00200000, LENGTH = 256K
    AXI_SRAM (wx) : ORIGIN = 0x01200000, LENGTH = 512K
    AXI_SRAM_NONCACHEABLE (wx) : ORIGIN = 0x01280000, LENGTH = 256K
    SHARE_RAM (w) : ORIGIN = 0x012FC000, LENGTH = 16K
    AHB_SRAM (w) : ORIGIN = 0xF0200000, LENGTH = 32K
}

SECTIONS
{
    .start : {
        KEEP(*(.uf2_signature))
        KEEP(*(.start))
    } > XPI0

    __vector_load_addr__ = ADDR(.start) + SIZEOF(.start);
    .vectors ORIGIN(ILM) : AT(__vector_load_addr__) {
        . = ALIGN(8);
        __vector_ram_start__ = .;
        KEEP(*(.vector_table))
        KEEP(*(.isr_vector))
        KEEP(*(.vector_s_table))
        KEEP(*(.isr_s_vector))
        . = ALIGN(8);
        __vector_ram_end__ = .;
    } > ILM

    .text (__vector_load_addr__ + SIZEOF(.vectors)) : {
        . = ALIGN(8);
        *(.text)
        *(.text*)
        *(.rodata)
        *(.rodata*)
        *(.srodata)
        *(.srodata*)

        *(.hash)
        *(.dyn*)
        *(.gnu*)
        *(.pl*)

        KEEP (*(.init))
        KEEP (*(.fini))

        /* section information for usbh class */
        . = ALIGN(8);
        __usbh_class_info_start__ = .;
        KEEP(*(.usbh_class_info))
        __usbh_class_info_end__ = .;

        /* RT-Thread related sections - start */
        /* section information for finsh shell */
        . = ALIGN(4);
        __fsymtab_start = .;
        KEEP(*(FSymTab))
        __fsymtab_end = .;
        . = ALIGN(4);
        __vsymtab_start = .;
        KEEP(*(VSymTab))
        __vsymtab_end = .;
        . = ALIGN(4);

        . = ALIGN(4);
        __rt_init_start = .;
        KEEP(*(SORT(.rti_fn*)))
        __rt_init_end = .;
        . = ALIGN(4);

        /* section information for modules */
        . = ALIGN(4);
        __rtmsymtab_start = .;
        KEEP(*(RTMSymTab))
        __rtmsymtab_end = .;

        /* RT-Thread related sections - end */
        . = ALIGN(8);
    } > XPI0

    .eh_frame : {
        __eh_frame_start = .;
        KEEP(*(.eh_frame))
        __eh_frame_end = .;
    } > XPI0

    .eh_frame_hdr : {
        KEEP(*(.eh_frame_hdr))
    } > XPI0
    __eh_frame_hdr_start = SIZEOF(.eh_frame_hdr) > 0 ? ADDR(.eh_frame_hdr) : 0;
    __eh_frame_hdr_end = SIZEOF(.eh_frame_hdr) > 0 ? . : 0;

    .rel : {
        KEEP(*(.rel*))
    } > XPI0

    PROVIDE (__etext = .);
    PROVIDE (_etext = .);
    PROVIDE (etext = .);

    __data_load_addr__ = etext;
    .data : AT(__data_load_addr__) {
        . = ALIGN(8);
        __data_start__ = .;
        __global_pointer$ = . + 0x800;
        *(.data)
        *(.data*)
        *(.sdata)
        *(.sdata*)

        KEEP(*(.jcr))
        KEEP(*(.dynamic))
        KEEP(*(.got*))
        KEEP(*(.got))
        KEEP(*(.gcc_except_table))
        KEEP(*(.gcc_except_table.*))

        . = ALIGN(8);
        PROVIDE(__preinit_array_start = .);
        KEEP(*(.preinit_array))
        PROVIDE(__preinit_array_end = .);

        . = ALIGN(8);
        PROVIDE(__init_array_start = .);
        KEEP(*(SORT_BY_INIT_PRIORITY(.init_array.*)))
        KEEP(*(.init_array))
        PROVIDE(__init_array_end = .);

        . = ALIGN(8);
        PROVIDE(__finit_array_start = .);
        KEEP(*(SORT_BY_INIT_PRIORITY(.finit_array.*)))
        KEEP(*(.finit_array))
        PROVIDE(__finit_array_end = .);

        . = ALIGN(8);
        KEEP(*crtbegin*.o(.ctors))
        KEEP(*(EXCLUDE_FILE (*crtend*.o) .ctors))
        KEEP(*(SORT(.ctors.*)))
        KEEP(*(.ctors))

        . = ALIGN(8);
        KEEP(*crtbegin*.o(.dtors))
        KEEP(*(EXCLUDE_FILE (*crtend*.o) .dtors))
        KEEP(*(SORT(.dtors.*)))
        KEEP(*(.dtors))
        . = ALIGN(8);
        __data_end__ = .;
        PROVIDE (__edata = .);
        PROVIDE (_edata = .);
        PROVIDE (edata = .);
    } > AXI_SRAM

    __fast_load_addr__ = etext + SIZEOF(.data);
    .fast : AT(__fast_load_addr__) {
        . = ALIGN(8);
        PROVIDE(__ramfunc_start__ = .);
        *(.fast)
        *(.fast.*)
        . = ALIGN(8);
        PROVIDE(__ramfunc_end__ = .);
    } > ILM

    __tdata_load_addr__ = etext + SIZEOF(.data) + SIZEOF(.fast);
    .tdata : AT(__tdata_load_addr__) {
        . = ALIGN(8);
        PROVIDE(__tdata_start__ = .);
        *(.tdata)
        *(.tdata.*)
        *(.gnu.linkonce.td.*)
        . = ALIGN(8);
        PROVIDE(__tdata_end__ = .);
    } > AXI_SRAM

    .tbss (NOLOAD) : {
        . = ALIGN(8);
        PROVIDE(__tbss_start__ = .);
        __thread_pointer$ = .;
        *(.tbss)
        *(.tbss.*)
        *(.gnu.linkonce.tb.*)
        *(.tcommon)
        . = ALIGN(8);
        PROVIDE(__tbss_end__ = .);
    } > AXI_SRAM

    __noncacheable_init_load_addr__ = etext + SIZEOF(.data) + SIZEOF(.fast) + SIZEOF(.tdata);
    .noncacheable.init : AT(__noncacheable_init_load_addr__) {
        . = ALIGN(8);
        __noncacheable_init_start__ = .;
        KEEP(*(.noncacheable.init))
        __noncacheable_init_end__ = .;
        . = ALIGN(8);
    } > AXI_SRAM_NONCACHEABLE

    __fast_ram_init_load_addr__ = etext + SIZEOF(.data) + SIZEOF(.fast) + SIZEOF(.tdata) + SIZEOF(.noncacheable.init);
    .fast_ram.init : AT(__fast_ram_init_load_addr__) {
        . = ALIGN(8);
        __fast_ram_init_start__ = .;
        KEEP(*(.fast_ram.init))
        __fast_ram_init_end__ = .;
        . = ALIGN(8);
    } > DLM

    .bss (NOLOAD) : {
        . = ALIGN(8);
        __bss_start__ = .;
        *(.bss)
        *(.bss*)
        *(.sbss*)
        *(.scommon)
        *(.scommon*)
        *(.dynsbss*)
        *(COMMON)
        . = ALIGN(8);
        _end = .;
        __bss_end__ = .;
    } > AXI_SRAM

    .framebuffer (NOLOAD) : {
        . = ALIGN(8);
        KEEP(*(.framebuffer))
        . = ALIGN(8);
    } > AXI_SRAM

    .noncacheable.bss (NOLOAD) : {
        . = ALIGN(8);
        KEEP(*(.noncacheable))
        __noncacheable_bss_start__ = .;
        KEEP(*(.noncacheable.bss))
        __noncacheable_bss_end__ = .;
        . = ALIGN(8);
    } > AXI_SRAM_NONCACHEABLE

    .sh_mem (NOLOAD) : {
        KEEP(*(.sh_mem))
    } > SHARE_RAM

    .ahb_sram (NOLOAD) : {
        KEEP(*(.ahb_sram))
    } > AHB_SRAM

    .fast_ram.bss (NOLOAD) : {
        . = ALIGN(8);
        KEEP(*(.fast_ram))
        __fast_ram_bss_start__ = .;
        KEEP(*(.fast_ram.bss))
        __fast_ram_bss_end__ = .;
        . = ALIGN(8);
    } > DLM

    .heap (NOLOAD) : {
        . = ALIGN(8);
        __heap_start__ = .;
        . += HEAP_SIZE;
        __heap_end__ = .;
    } > DLM

    .stack (NOLOAD) : {
        . = ALIGN(16);
        __stack_base__ = .;
        . += STACK_SIZE;
        . = ALIGN(16);
        PROVIDE (_stack = .);
        PROVIDE (_stack_safe = .);
    } > DLM

    __noncacheable_start__ = ORIGIN(AXI_SRAM_NONCACHEABLE);
    __noncacheable_end__ = ORIGIN(AXI_SRAM_NONCACHEABLE) + LENGTH(AXI_SRAM_NONCACHEABLE);
    __share_mem_start__ = ORIGIN(SHARE_RAM);
    __share_mem_end__ = ORIGIN(SHARE_RAM) + LENGTH(SHARE_RAM);

    __fw_size__ = SIZEOF(.start) + SIZEOF(.vectors) + SIZEOF(.rel) + SIZEOF(.text) + SIZEOF(.data) + SIZEOF(.fast) + SIZEOF(.tdata) + SIZEOF(.noncacheable.init) + SIZEOF(.fast_ram.init);
    __last_addr__ = __fast_ram_init_load_addr__ + SIZEOF(.fast_ram.init);
    ASSERT(((__fw_size__ <= LENGTH(XPI0)) && (__last_addr__ <= (ORIGIN(XPI0) + LENGTH(XPI0)))), "****** FAILED! XPI0 does not have enough space! ******")
}
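
/*
 * Usage note (illustrative only, not part of the linker script proper):
 * the __*_load_addr__ / __*_start__ / __*_end__ symbol pairs defined above
 * describe where each initialized section is stored in XPI0 flash (LMA)
 * and where it must live at run time (VMA). Below is a minimal C sketch of
 * how early startup code might consume these symbols; the helper and
 * function names are hypothetical and the actual HPMicro SDK startup code
 * may differ.
 *
 *   #include <stdint.h>
 *   #include <string.h>
 *
 *   extern uint8_t __data_load_addr__[], __data_start__[], __data_end__[];
 *   extern uint8_t __fast_load_addr__[], __ramfunc_start__[], __ramfunc_end__[];
 *   extern uint8_t __bss_start__[], __bss_end__[];
 *
 *   // Hypothetical helper: copy a section from its flash LMA to its RAM VMA.
 *   static void copy_section(uint8_t *dst, const uint8_t *src, const uint8_t *dst_end)
 *   {
 *       memcpy(dst, src, (size_t)(dst_end - dst));
 *   }
 *
 *   void runtime_init(void)  // hypothetical routine called before main()
 *   {
 *       copy_section(__data_start__, __data_load_addr__, __data_end__);       // .data -> AXI_SRAM
 *       copy_section(__ramfunc_start__, __fast_load_addr__, __ramfunc_end__); // .fast -> ILM
 *       memset(__bss_start__, 0, (size_t)(__bss_end__ - __bss_start__));      // zero .bss
 *   }
 */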