/*
 * Copyright (c) 2021-2023 HPMicro
 * SPDX-License-Identifier: BSD-3-Clause
 */

ENTRY(_start)

STACK_SIZE = _stack_size;
HEAP_SIZE = _heap_size;
UF2_BOOTLOADER_RESERVED_LENGTH = DEFINED(_uf2_bl_length) ? _uf2_bl_length : 0x20000;

MEMORY
{
    XPI0 (rx) : ORIGIN = 0x80000000 + UF2_BOOTLOADER_RESERVED_LENGTH, LENGTH = _flash_size - UF2_BOOTLOADER_RESERVED_LENGTH
    ILM (wx) : ORIGIN = 0x00000000, LENGTH = 256K
    DLM (w) : ORIGIN = 0x00080000, LENGTH = 256K
    AXI_SRAM (wx) : ORIGIN = 0x01080000, LENGTH = 512K /* AXI SRAM0 */
    NONCACHEABLE_RAM (wx) : ORIGIN = 0x01100000, LENGTH = 256K /* AXI SRAM1 */
    SHARE_RAM (w) : ORIGIN = 0x0117C000, LENGTH = 16K
    AHB_SRAM (w) : ORIGIN = 0xF0300000, LENGTH = 32K
    APB_SRAM (w) : ORIGIN = 0xF40F0000, LENGTH = 8K
}

SECTIONS
{
    .start : {
        KEEP(*(.uf2_signature))
        KEEP(*(.start))
    } > XPI0

    __vector_load_addr__ = ADDR(.start) + SIZEOF(.start);
    .vectors ORIGIN(ILM) : AT(__vector_load_addr__) {
        . = ALIGN(8);
        __vector_ram_start__ = .;
        KEEP(*(.vector_table))
        KEEP(*(.isr_vector))
        . = ALIGN(8);
        __vector_ram_end__ = .;
    } > ILM

    .text (__vector_load_addr__ + SIZEOF(.vectors)) : {
        . = ALIGN(8);
        *(.text)
        *(.text*)
        *(.rodata)
        *(.rodata*)
        *(.srodata)
        *(.srodata*)

        *(.hash)
        *(.dyn*)
        *(.gnu*)
        *(.pl*)

        KEEP (*(.init))
        KEEP (*(.fini))

        /* section information for usbh class */
        . = ALIGN(8);
        __usbh_class_info_start__ = .;
        KEEP(*(.usbh_class_info))
        __usbh_class_info_end__ = .;

        /* RT-Thread related sections - Start */
        /* section information for finsh shell */
        . = ALIGN(4);
        __fsymtab_start = .;
        KEEP(*(FSymTab))
        __fsymtab_end = .;
        . = ALIGN(4);
        __vsymtab_start = .;
        KEEP(*(VSymTab))
        __vsymtab_end = .;
        . = ALIGN(4);

        . = ALIGN(4);
        __rt_init_start = .;
        KEEP(*(SORT(.rti_fn*)))
        __rt_init_end = .;
        . = ALIGN(4);

        /* section information for modules */
        . = ALIGN(4);
        __rtmsymtab_start = .;
        KEEP(*(RTMSymTab))
        __rtmsymtab_end = .;

        /* RT-Thread related sections - end */
        . = ALIGN(8);
    } > XPI0

    .eh_frame :
    {
        __eh_frame_start = .;
        KEEP(*(.eh_frame))
        __eh_frame_end = .;
    } > XPI0

    .eh_frame_hdr :
    {
        KEEP(*(.eh_frame_hdr))
    } > XPI0
    __eh_frame_hdr_start = SIZEOF(.eh_frame_hdr) > 0 ? ADDR(.eh_frame_hdr) : 0;
    __eh_frame_hdr_end = SIZEOF(.eh_frame_hdr) > 0 ? . : 0;

    .rel : {
        KEEP(*(.rel*))
    } > XPI0

    PROVIDE (__etext = .);
    PROVIDE (_etext = .);
    PROVIDE (etext = .);

    __data_load_addr__ = etext;
    .data : AT(__data_load_addr__) {
        . = ALIGN(8);
        __data_start__ = .;
        __global_pointer$ = . + 0x800;
        *(.data)
        *(.data*)
        *(.sdata)
        *(.sdata*)

        KEEP(*(.jcr))
        KEEP(*(.dynamic))
        KEEP(*(.got*))
        KEEP(*(.got))
        KEEP(*(.gcc_except_table))
        KEEP(*(.gcc_except_table.*))

        . = ALIGN(8);
        PROVIDE(__preinit_array_start = .);
        KEEP(*(.preinit_array))
        PROVIDE(__preinit_array_end = .);

        . = ALIGN(8);
        PROVIDE(__init_array_start = .);
        KEEP(*(SORT_BY_INIT_PRIORITY(.init_array.*)))
        KEEP(*(.init_array))
        PROVIDE(__init_array_end = .);

        . = ALIGN(8);
        PROVIDE(__finit_array_start = .);
        KEEP(*(SORT_BY_INIT_PRIORITY(.finit_array.*)))
        KEEP(*(.finit_array))
        PROVIDE(__finit_array_end = .);

        . = ALIGN(8);
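        /*
         * Legacy .ctors/.dtors tables: crtbegin.o's entry must come first and
         * crtend.o's entry last, so crtend.o is excluded from the wildcard
         * matches below and is picked up by the final KEEP(*(.ctors)) /
         * KEEP(*(.dtors)).
         */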
        KEEP(*crtbegin*.o(.ctors))
        KEEP(*(EXCLUDE_FILE (*crtend*.o) .ctors))
        KEEP(*(SORT(.ctors.*)))
        KEEP(*(.ctors))

        . = ALIGN(8);
        KEEP(*crtbegin*.o(.dtors))
        KEEP(*(EXCLUDE_FILE (*crtend*.o) .dtors))
        KEEP(*(SORT(.dtors.*)))
        KEEP(*(.dtors))
        . = ALIGN(8);
        __data_end__ = .;
        PROVIDE (__edata = .);
        PROVIDE (_edata = .);
        PROVIDE (edata = .);
    } > AXI_SRAM

    __fast_load_addr__ = etext + SIZEOF(.data);
    .fast : AT(__fast_load_addr__) {
        . = ALIGN(8);
        PROVIDE(__ramfunc_start__ = .);
        *(.fast)
        *(.fast.*)
        . = ALIGN(8);
        PROVIDE(__ramfunc_end__ = .);
    } > ILM

    __tdata_load_addr__ = etext + SIZEOF(.data) + SIZEOF(.fast);
    .tdata : AT(__tdata_load_addr__) {
        . = ALIGN(8);
        PROVIDE(__tdata_start__ = .);
        *(.tdata)
        *(.tdata.*)
        *(.gnu.linkonce.td.*)
        . = ALIGN(8);
        PROVIDE(__tdata_end__ = .);
    } > AXI_SRAM

    .tbss (NOLOAD) : {
        . = ALIGN(8);
        PROVIDE(__tbss_start__ = .);
        __thread_pointer$ = .;
        *(.tbss)
        *(.tbss.*)
        *(.gnu.linkonce.tb.*)
        *(.tcommon)
        . = ALIGN(8);
        PROVIDE(__tbss_end__ = .);
    } > AXI_SRAM

    __noncacheable_init_load_addr__ = etext + SIZEOF(.data) + SIZEOF(.fast) + SIZEOF(.tdata);
    .noncacheable.init : AT(__noncacheable_init_load_addr__) {
        . = ALIGN(8);
        __noncacheable_init_start__ = .;
        KEEP(*(.noncacheable.init))
        __noncacheable_init_end__ = .;
        . = ALIGN(8);
    } > NONCACHEABLE_RAM

    __fast_ram_init_load_addr__ = etext + SIZEOF(.data) + SIZEOF(.fast) + SIZEOF(.tdata) + SIZEOF(.noncacheable.init);
    .fast_ram.init : AT(__fast_ram_init_load_addr__) {
        . = ALIGN(8);
        __fast_ram_init_start__ = .;
        KEEP(*(.fast_ram.init))
        __fast_ram_init_end__ = .;
        . = ALIGN(8);
    } > DLM

    .bss (NOLOAD) : {
        . = ALIGN(8);
        __bss_start__ = .;
        *(.bss)
        *(.bss*)
        *(.sbss*)
        *(.scommon)
        *(.scommon*)
        *(.dynsbss*)
        *(COMMON)
        . = ALIGN(8);
        _end = .;
        __bss_end__ = .;
    } > AXI_SRAM

    .framebuffer (NOLOAD) : {
        . = ALIGN(8);
        KEEP(*(.framebuffer))
        . = ALIGN(8);
    } > AXI_SRAM

    .noncacheable.bss (NOLOAD) : {
        . = ALIGN(8);
        KEEP(*(.noncacheable))
        __noncacheable_bss_start__ = .;
        KEEP(*(.noncacheable.bss))
        __noncacheable_bss_end__ = .;
        . = ALIGN(8);
    } > NONCACHEABLE_RAM

    .ahb_sram (NOLOAD) : {
        KEEP(*(.ahb_sram))
    } > AHB_SRAM

    .apb_sram (NOLOAD) : {
        KEEP(*(.backup_sram))
    } > APB_SRAM

    .sh_mem (NOLOAD) : {
        KEEP(*(.sh_mem))
    } > SHARE_RAM

    .fast_ram.bss (NOLOAD) : {
        . = ALIGN(8);
        KEEP(*(.fast_ram))
        __fast_ram_bss_start__ = .;
        KEEP(*(.fast_ram.bss))
        __fast_ram_bss_end__ = .;
        . = ALIGN(8);
    } > DLM

    .heap (NOLOAD) : {
        . = ALIGN(8);
        __heap_start__ = .;
        . += HEAP_SIZE;
        __heap_end__ = .;
    } > DLM
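    /*
     * Main stack: STACK_SIZE bytes reserved in DLM. _stack marks the top of
     * the reserved region; the RISC-V stack grows downwards from it.
     */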
    .stack (NOLOAD) : {
        . = ALIGN(16);
        __stack_base__ = .;
        . += STACK_SIZE;
        . = ALIGN(16);
        PROVIDE (_stack = .);
        PROVIDE (_stack_safe = .);
    } > DLM

    __noncacheable_start__ = ORIGIN(NONCACHEABLE_RAM);
    __noncacheable_end__ = ORIGIN(NONCACHEABLE_RAM) + LENGTH(NONCACHEABLE_RAM);
    __share_mem_start__ = ORIGIN(SHARE_RAM);
    __share_mem_end__ = ORIGIN(SHARE_RAM) + LENGTH(SHARE_RAM);

    __fw_size__ = SIZEOF(.start) + SIZEOF(.vectors) + SIZEOF(.rel) + SIZEOF(.text) + SIZEOF(.data) + SIZEOF(.fast) + SIZEOF(.tdata) + SIZEOF(.noncacheable.init) + SIZEOF(.fast_ram.init);
    __last_addr__ = __fast_ram_init_load_addr__ + SIZEOF(.fast_ram.init);
    ASSERT(((__fw_size__ <= LENGTH(XPI0)) && (__last_addr__ <= (ORIGIN(XPI0) + LENGTH(XPI0)))), "****** FAILED! XPI0 does not have enough space! ******")
}
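
/*
 * Illustrative usage (a sketch, not part of this script): application code can
 * target the custom output sections collected above with plain GCC/Clang
 * section attributes, for example (the names below are hypothetical):
 *
 *   __attribute__((section(".fast"))) void fast_path_handler(void);
 *   __attribute__((section(".noncacheable.bss"))) static unsigned char dma_buf[512];
 *
 * The SDK headers typically provide wrapper macros for these placements; the
 * raw attribute form is shown here only for clarity.
 */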