mirror of
https://github.com/espressif/ESP8266_RTOS_SDK.git
synced 2025-05-21 09:05:59 +08:00
feat(esp8266): supports "make size" and its family function
1. support "make size", "make size-files", "make size-components" and "make size-symbols" 2. add esp-idf style link file including "esp8266.ld" and "esp8266.project.ld.in" 3. add link advaced generation file to components of esp8266 and spi_flash
This commit is contained in:
@ -47,11 +47,13 @@ else()
|
||||
set(include_dirs "include" "include/driver")
|
||||
|
||||
set(priv_requires "wpa_supplicant" "log" "spi_flash" "tcpip_adapter" "esp_ringbuf" "bootloader_support" "nvs_flash" "util")
|
||||
set(fragments linker.lf ld/esp8266_fragments.lf)
|
||||
|
||||
idf_component_register(SRCS "${srcs}"
|
||||
INCLUDE_DIRS "${include_dirs}"
|
||||
REQUIRES "${requires}"
|
||||
PRIV_REQUIRES "${priv_requires}"
|
||||
LDFRAGMENTS "${fragments}"
|
||||
REQUIRED_IDF_TARGETS esp8266)
|
||||
|
||||
target_link_libraries(${COMPONENT_LIB} PUBLIC "-L ${CMAKE_CURRENT_SOURCE_DIR}/lib" "-lstdc++")
|
||||
@ -73,7 +75,9 @@ else()
|
||||
endforeach()
|
||||
endif()
|
||||
target_linker_script(${COMPONENT_LIB} INTERFACE "${CMAKE_CURRENT_BINARY_DIR}/esp8266_out.ld")
|
||||
target_linker_script(${COMPONENT_LIB} INTERFACE "${CMAKE_CURRENT_BINARY_DIR}/esp8266_common_out.ld")
|
||||
|
||||
target_linker_script(${COMPONENT_LIB} INTERFACE "${CMAKE_CURRENT_LIST_DIR}/ld/esp8266.project.ld.in"
|
||||
PROCESS "${CMAKE_CURRENT_BINARY_DIR}/ld/esp8266.project.ld")
|
||||
|
||||
target_linker_script(${COMPONENT_LIB} INTERFACE "ld/esp8266.rom.ld")
|
||||
target_linker_script(${COMPONENT_LIB} INTERFACE "ld/esp8266.peripherals.ld")
|
||||
@ -93,13 +97,7 @@ else()
|
||||
MAIN_DEPENDENCY ${LD_DIR}/esp8266.ld ${SDKCONFIG_H}
|
||||
COMMENT "Generating memory map linker script..."
|
||||
VERBATIM)
|
||||
add_custom_command(
|
||||
OUTPUT esp8266_common_out.ld
|
||||
COMMAND "${CMAKE_C_COMPILER}" -C -P -x c -E -o esp8266_common_out.ld -I ${CONFIG_DIR} ${LD_DIR}/esp8266.common.ld
|
||||
MAIN_DEPENDENCY ${LD_DIR}/esp8266.common.ld ${SDKCONFIG_H}
|
||||
COMMENT "Generating section linker script..."
|
||||
VERBATIM)
|
||||
add_custom_target(esp8266_linker_script DEPENDS "${CMAKE_CURRENT_BINARY_DIR}/esp8266_out.ld" "${CMAKE_CURRENT_BINARY_DIR}/esp8266_common_out.ld")
|
||||
add_custom_target(esp8266_linker_script DEPENDS "${CMAKE_CURRENT_BINARY_DIR}/esp8266_out.ld")
|
||||
add_dependencies(${COMPONENT_LIB} esp8266_linker_script)
|
||||
|
||||
if(CONFIG_ESP8266_PHY_INIT_DATA_IN_PARTITION)
|
||||
|
@ -64,6 +64,13 @@ endif # CONFIG_ESP_PHY_INIT_DATA_IN_PARTITION
|
||||
|
||||
endif
|
||||
|
||||
LINKER_SCRIPT_TEMPLATE := $(COMPONENT_PATH)/ld/esp8266.project.ld.in
|
||||
LINKER_SCRIPT_OUTPUT_DIR := $(abspath $(BUILD_DIR_BASE)/esp8266)
|
||||
|
||||
# Target to generate linker script generator from fragments presented by each of
|
||||
# the components
|
||||
$(eval $(call ldgen_process_template,$(LINKER_SCRIPT_TEMPLATE),$(LINKER_SCRIPT_OUTPUT_DIR)/esp8266.project.ld))
|
||||
|
||||
# global CFLAGS for ESP8266
|
||||
CFLAGS += -DICACHE_FLASH
|
||||
|
||||
|
@ -25,11 +25,13 @@ endif
|
||||
#specifies its own scripts.
|
||||
LINKER_SCRIPTS += esp8266.rom.ld esp8266.peripherals.ld
|
||||
|
||||
COMPONENT_ADD_LDFRAGMENTS += ld/esp8266_fragments.lf linker.lf
|
||||
|
||||
COMPONENT_ADD_LDFLAGS += -L$(COMPONENT_PATH)/lib \
|
||||
$(addprefix -l,$(LIBS)) \
|
||||
-L $(COMPONENT_PATH)/ld \
|
||||
-T esp8266_out.ld \
|
||||
-T esp8266_common_out.ld \
|
||||
-T $(COMPONENT_BUILD_DIR)/esp8266.project.ld \
|
||||
-Wl,--no-check-sections \
|
||||
-u call_user_start \
|
||||
$(addprefix -T ,$(LINKER_SCRIPTS))
|
||||
@ -38,20 +40,18 @@ ALL_LIB_FILES := $(patsubst %,$(COMPONENT_PATH)/lib/lib%.a,$(LIBS))
|
||||
|
||||
# final linking of project ELF depends on all binary libraries, and
|
||||
# all linker scripts
|
||||
COMPONENT_ADD_LINKER_DEPS := $(ALL_LIB_FILES) $(addprefix ld/,$(LINKER_SCRIPTS))
|
||||
COMPONENT_ADD_LINKER_DEPS := $(ALL_LIB_FILES) $(addprefix ld/, $(filter-out $(COMPONENT_BUILD_DIR)/esp8266.project.ld, $(LINKER_SCRIPTS))) \
|
||||
$(COMPONENT_BUILD_DIR)/esp8266.project.ld
|
||||
|
||||
# Preprocess esp8266.ld linker script into esp8266_out.ld
|
||||
#
|
||||
# The library doesn't really depend on esp8266_out.ld, but it
|
||||
# saves us from having to add the target to a Makefile.projbuild
|
||||
$(COMPONENT_LIBRARY): esp8266_out.ld esp8266_common_out.ld
|
||||
$(COMPONENT_LIBRARY): esp8266_out.ld
|
||||
|
||||
esp8266_out.ld: $(COMPONENT_PATH)/ld/esp8266.ld ../include/sdkconfig.h
|
||||
$(CC) $(CFLAGS) -I ../include -C -P -x c -E $< -o $@
|
||||
|
||||
esp8266_common_out.ld: $(COMPONENT_PATH)/ld/esp8266.common.ld ../include/sdkconfig.h
|
||||
$(CC) -I ../include -C -P -x c -E $< -o $@
|
||||
|
||||
COMPONENT_EXTRA_CLEAN := esp8266_out.ld
|
||||
COMPONENT_EXTRA_CLEAN := esp8266_out.ld $(COMPONENT_BUILD_DIR)/esp8266.project.ld
|
||||
|
||||
endif
|
@ -181,7 +181,7 @@
|
||||
#define IRAM_SIZE (48 * 1024)
|
||||
|
||||
#define FLASH_BASE (0x40200000)
|
||||
#define FLASH_SIZE (1 * 1024 * 1024)
|
||||
#define FLASH_SIZE (2 * 1024 * 1024)
|
||||
|
||||
#define RTC_SYS_BASE (0x60001000)
|
||||
#define RTC_SYS_SIZE (0x200)
|
||||
|
@ -1,258 +0,0 @@
|
||||
/* This linker script generated from xt-genldscripts.tpp for LSP . */
|
||||
/* Linker Script for ld -N */
|
||||
|
||||
#include "sdkconfig.h"
|
||||
|
||||
PHDRS
|
||||
{
|
||||
dport0_0_phdr PT_LOAD;
|
||||
dram0_0_phdr PT_LOAD;
|
||||
dram0_0_bss_phdr PT_LOAD;
|
||||
iram1_0_phdr PT_LOAD;
|
||||
irom0_0_phdr PT_LOAD;
|
||||
}
|
||||
|
||||
/* Default entry point: */
|
||||
ENTRY(call_user_start)
|
||||
EXTERN(_DebugExceptionVector)
|
||||
EXTERN(_DoubleExceptionVector)
|
||||
EXTERN(_KernelExceptionVector)
|
||||
EXTERN(_NMIExceptionVector)
|
||||
EXTERN(_UserExceptionVector)
|
||||
PROVIDE(_memmap_vecbase_reset = 0x40000000);
|
||||
/* Various memory-map dependent cache attribute settings: */
|
||||
_memmap_cacheattr_wb_base = 0x00000110;
|
||||
_memmap_cacheattr_wt_base = 0x00000110;
|
||||
_memmap_cacheattr_bp_base = 0x00000220;
|
||||
_memmap_cacheattr_unused_mask = 0xFFFFF00F;
|
||||
_memmap_cacheattr_wb_trapnull = 0x2222211F;
|
||||
_memmap_cacheattr_wba_trapnull = 0x2222211F;
|
||||
_memmap_cacheattr_wbna_trapnull = 0x2222211F;
|
||||
_memmap_cacheattr_wt_trapnull = 0x2222211F;
|
||||
_memmap_cacheattr_bp_trapnull = 0x2222222F;
|
||||
_memmap_cacheattr_wb_strict = 0xFFFFF11F;
|
||||
_memmap_cacheattr_wt_strict = 0xFFFFF11F;
|
||||
_memmap_cacheattr_bp_strict = 0xFFFFF22F;
|
||||
_memmap_cacheattr_wb_allvalid = 0x22222112;
|
||||
_memmap_cacheattr_wt_allvalid = 0x22222112;
|
||||
_memmap_cacheattr_bp_allvalid = 0x22222222;
|
||||
PROVIDE(_memmap_cacheattr_reset = _memmap_cacheattr_wb_trapnull);
|
||||
|
||||
SECTIONS
|
||||
{
|
||||
.dport0.rodata : ALIGN(4)
|
||||
{
|
||||
_dport0_rodata_start = ABSOLUTE(.);
|
||||
*(.dport0.rodata)
|
||||
*(.dport.rodata)
|
||||
_dport0_rodata_end = ABSOLUTE(.);
|
||||
} >dport0_0_seg :dport0_0_phdr
|
||||
|
||||
.dport0.literal : ALIGN(4)
|
||||
{
|
||||
_dport0_literal_start = ABSOLUTE(.);
|
||||
*(.dport0.literal)
|
||||
*(.dport.literal)
|
||||
_dport0_literal_end = ABSOLUTE(.);
|
||||
} >dport0_0_seg :dport0_0_phdr
|
||||
|
||||
.dport0.data : ALIGN(4)
|
||||
{
|
||||
_dport0_data_start = ABSOLUTE(.);
|
||||
*(.dport0.data)
|
||||
*(.dport.data)
|
||||
_dport0_data_end = ABSOLUTE(.);
|
||||
} >dport0_0_seg :dport0_0_phdr
|
||||
|
||||
/* RTC memory holds user's data/rodata */
|
||||
.rtc.data :
|
||||
{
|
||||
_rtc_data_start = ABSOLUTE(.);
|
||||
*(.rtc.data .rtc.data.*)
|
||||
*(.rtc.rodata .rtc.rodata.*)
|
||||
_rtc_data_end = ABSOLUTE(.);
|
||||
} > rtc_seg
|
||||
|
||||
.text : ALIGN(4)
|
||||
{
|
||||
_stext = .;
|
||||
_text_start = ABSOLUTE(.);
|
||||
LONG(_text_start)
|
||||
. = ALIGN(16);
|
||||
*(.DebugExceptionVector.text)
|
||||
. = ALIGN(16);
|
||||
*(.NMIExceptionVector.text)
|
||||
. = ALIGN(16);
|
||||
*(.KernelExceptionVector.text)
|
||||
LONG(0)
|
||||
LONG(0)
|
||||
LONG(0)
|
||||
LONG(0)
|
||||
. = ALIGN(16);
|
||||
*(.UserExceptionVector.text)
|
||||
LONG(0)
|
||||
LONG(0)
|
||||
LONG(0)
|
||||
LONG(0)
|
||||
. = ALIGN(16);
|
||||
*(.DoubleExceptionVector.text)
|
||||
LONG(0)
|
||||
LONG(0)
|
||||
LONG(0)
|
||||
LONG(0)
|
||||
. = ALIGN (16);
|
||||
*(.entry.text)
|
||||
*(.init.literal)
|
||||
*(.init)
|
||||
*(.iram1 .iram1.*)
|
||||
*libspi_flash.a:spi_flash_raw.*(.literal .text .literal.* .text.*)
|
||||
#ifdef CONFIG_ESP8266_WIFI_DEBUG_LOG_ENABLE
|
||||
*libpp_dbg.a:(.literal .text .literal.* .text.*)
|
||||
#else
|
||||
*libpp.a:(.literal .text .literal.* .text.*)
|
||||
#endif
|
||||
*libphy.a:(.literal .text .literal.* .text.*)
|
||||
*(.literal .text .stub .gnu.warning .gnu.linkonce.literal.* .gnu.linkonce.t.*.literal .gnu.linkonce.t.*)
|
||||
*(.fini.literal)
|
||||
*(.fini)
|
||||
*(.gnu.version)
|
||||
|
||||
#ifdef CONFIG_LWIP_GLOBAL_DATA_LINK_IRAM
|
||||
*liblwip.a:(.bss .data .bss.* .data.* COMMON)
|
||||
#endif
|
||||
|
||||
#ifdef CONFIG_TCPIP_ADAPTER_GLOBAL_DATA_LINK_IRAM
|
||||
*libtcpip_adapter.a:(.bss .data .bss.* .data.* COMMON)
|
||||
#endif
|
||||
|
||||
#ifdef CONFIG_ESP8266_CORE_GLOBAL_DATA_LINK_IRAM
|
||||
#ifdef CONFIG_ESP8266_WIFI_DEBUG_LOG_ENABLE
|
||||
*libcore_dbg.a:(.bss .data .bss.* .data.* COMMON)
|
||||
#else
|
||||
*libcore.a:(.bss .data .bss.* .data.* COMMON)
|
||||
#endif
|
||||
#endif
|
||||
|
||||
#ifdef CONFIG_FREERTOS_GLOBAL_DATA_LINK_IRAM
|
||||
*libfreertos.a:tasks.*(.bss .data .bss.* .data.* COMMON)
|
||||
*libfreertos.a:timers.*(.bss .data .bss.* .data.* COMMON)
|
||||
*libfreertos.a:freertos_hooks.*(.bss .data .bss.* .data.* COMMON)
|
||||
#endif
|
||||
|
||||
#ifdef CONFIG_LINK_ETS_PRINTF_TO_IRAM
|
||||
*libesp8266.a:ets_printf.*(.literal .text .literal.* .text.* .rodata.* .rodata)
|
||||
#endif
|
||||
|
||||
_text_end = ABSOLUTE(.);
|
||||
_etext = .;
|
||||
} >iram1_0_seg :iram1_0_phdr
|
||||
|
||||
.data : ALIGN(4)
|
||||
{
|
||||
_data_start = ABSOLUTE(.);
|
||||
*(.data .data.*)
|
||||
*(.dram0 .dram0.*)
|
||||
*(.gnu.linkonce.d.*)
|
||||
*(.data1)
|
||||
*(.sdata .sdata.*)
|
||||
*(.gnu.linkonce.s.*)
|
||||
*(.sdata2 .sdata2.*)
|
||||
*(.gnu.linkonce.s2.*)
|
||||
*(.jcr)
|
||||
_data_end = ABSOLUTE(.);
|
||||
} >dram0_0_seg :dram0_0_phdr
|
||||
|
||||
.rodata : ALIGN(4)
|
||||
{
|
||||
_rodata_start = ABSOLUTE(.);
|
||||
#ifdef CONFIG_ESP8266_WIFI_DEBUG_LOG_ENABLE
|
||||
*libpp_dbg.a:(.rodata.* .rodata)
|
||||
#else
|
||||
*libpp.a:(.rodata.* .rodata)
|
||||
#endif
|
||||
*liblog.a:(.rodata.* .rodata)
|
||||
*(.gnu.linkonce.r.*)
|
||||
*(.rodata1)
|
||||
__XT_EXCEPTION_TABLE__ = ABSOLUTE(.);
|
||||
*(.xt_except_table)
|
||||
*(.gcc_except_table)
|
||||
*(.gnu.linkonce.e.*)
|
||||
*(.gnu.version_r)
|
||||
*(.eh_frame)
|
||||
. = (. + 3) & ~ 3;
|
||||
/* C++ constructor and destructor tables, properly ordered: */
|
||||
__init_array_start = ABSOLUTE(.);
|
||||
KEEP (*crtbegin.*(.ctors))
|
||||
KEEP (*(EXCLUDE_FILE (*crtend.*) .ctors))
|
||||
KEEP (*(SORT(.ctors.*)))
|
||||
KEEP (*(.ctors))
|
||||
__init_array_end = ABSOLUTE(.);
|
||||
KEEP (*crtbegin.*(.dtors))
|
||||
KEEP (*(EXCLUDE_FILE (*crtend.*) .dtors))
|
||||
KEEP (*(SORT(.dtors.*)))
|
||||
KEEP (*(.dtors))
|
||||
/* C++ exception handlers table: */
|
||||
__XT_EXCEPTION_DESCS__ = ABSOLUTE(.);
|
||||
*(.xt_except_desc)
|
||||
*(.gnu.linkonce.h.*)
|
||||
__XT_EXCEPTION_DESCS_END__ = ABSOLUTE(.);
|
||||
*(.xt_except_desc_end)
|
||||
*(.dynamic)
|
||||
*(.gnu.version_d)
|
||||
. = ALIGN(4); /* this table MUST be 4-byte aligned */
|
||||
_bss_table_start = ABSOLUTE(.);
|
||||
LONG(_bss_start)
|
||||
LONG(_bss_end)
|
||||
_bss_table_end = ABSOLUTE(.);
|
||||
_rodata_end = ABSOLUTE(.);
|
||||
} >dram0_0_seg :dram0_0_phdr
|
||||
|
||||
.UserExceptionVector.literal : AT(LOADADDR(.rodata) + (ADDR(.UserExceptionVector.literal) - ADDR(.rodata))) ALIGN(4)
|
||||
{
|
||||
_UserExceptionVector_literal_start = ABSOLUTE(.);
|
||||
*(.UserExceptionVector.literal)
|
||||
_UserExceptionVector_literal_end = ABSOLUTE(.);
|
||||
} >dram0_0_seg :dram0_0_phdr
|
||||
|
||||
.bss ALIGN(8) (NOLOAD) : ALIGN(4)
|
||||
{
|
||||
. = ALIGN (8);
|
||||
_bss_start = ABSOLUTE(.);
|
||||
*(.dynsbss)
|
||||
*(.sbss .sbss.*)
|
||||
*(.gnu.linkonce.sb.*)
|
||||
*(.scommon)
|
||||
*(.sbss2 .sbss2.*)
|
||||
*(.gnu.linkonce.sb2.*)
|
||||
*(.dynbss)
|
||||
*(.bss .bss.*)
|
||||
*(.gnu.linkonce.b.*)
|
||||
*(COMMON)
|
||||
. = ALIGN (8);
|
||||
_bss_end = ABSOLUTE(.);
|
||||
_heap_start = ABSOLUTE(.);
|
||||
/* _stack_sentry = ALIGN(0x8); */
|
||||
} >dram0_0_seg :dram0_0_bss_phdr
|
||||
/* __stack = 0x3ffc8000; */
|
||||
|
||||
.irom0.text : ALIGN(4)
|
||||
{
|
||||
_irom0_text_start = ABSOLUTE(.);
|
||||
*(.user.data .user.data.*)
|
||||
*(.rodata.* .rodata .irom0.literal .irom.literal .irom.text.literal .irom0.text .irom.text)
|
||||
*(.literal.* .text.*)
|
||||
*(.rodata2.* .rodata2 .literal2.* .literal2 .text2.* .text2)
|
||||
|
||||
_irom0_text_end = ABSOLUTE(.);
|
||||
} >irom0_0_seg :irom0_0_phdr
|
||||
|
||||
.lit4 : ALIGN(4)
|
||||
{
|
||||
_lit4_start = ABSOLUTE(.);
|
||||
*(*.lit4)
|
||||
*(.lit4.*)
|
||||
*(.gnu.linkonce.lit4.*)
|
||||
_lit4_end = ABSOLUTE(.);
|
||||
} >iram1_0_seg :iram1_0_phdr
|
||||
|
||||
}
|
@ -1,34 +1,44 @@
|
||||
/* user1.bin @ 0x1000, user2.bin @ 0x10000 */
|
||||
/* ESP8266 Linker Script Memory Layout
|
||||
|
||||
/* Flash Map (1024KB + 1024KB), support 2MB/4MB SPI Flash */
|
||||
/* |..|........................|.....|.....|..|........................|.....|....| */
|
||||
/* ^ ^ ^ ^ ^ ^ ^ ^ */
|
||||
/* |_boot start(0x0000) | | |_pad start(0x100000) | | */
|
||||
/* |_user1 start(0x1000) |_user1 end |_user2 start(0x101000) |_user2 end */
|
||||
/* |_system param symmetric area(0xfb000) |_system param area(0x1fb000) */
|
||||
This file describes the memory layout (memory blocks).
|
||||
|
||||
/* NOTICE: */
|
||||
/* 1. You can change irom0 len, but MUST make sure user1 end not overlap system param symmetric area. */
|
||||
/* 2. Space between user1 end and pad start can be used as user param area. */
|
||||
/* 3. Space between user2 end and system param area can be used as user param area. */
|
||||
/* 4. Don't change any other seg. */
|
||||
/* 5. user1.bin and user2.bin are same in this mode, so upgrade only need one of them. */
|
||||
esp8266.project.ld contains output sections to link compiler output
|
||||
into these memory blocks.
|
||||
|
||||
***
|
||||
|
||||
This linker script is passed through the C preprocessor to include
|
||||
configuration options.
|
||||
|
||||
Please use preprocessor features sparingly! Restrict
|
||||
to simple macros with numeric values, and/or #if/#endif blocks.
|
||||
*/
|
||||
#include "sdkconfig.h"
|
||||
|
||||
|
||||
MEMORY
|
||||
{
|
||||
dport0_0_seg : org = 0x3FF00000, len = 0x10
|
||||
/* All these values assume the flash cache is on, and have the blocks this uses subtracted from the length
|
||||
of the various regions. */
|
||||
|
||||
/* All .data/.bss/heap are in this segment. */
|
||||
dram0_0_seg : org = 0x3FFE8000, len = 0x18000
|
||||
/* IRAM for cpu. The length is due to the cache mode which is able to be set half or full mode. */
|
||||
iram0_0_seg (RX) : org = 0x40100000, len = CONFIG_SOC_IRAM_SIZE
|
||||
|
||||
/* Functions which are critical should be put in this segment. */
|
||||
iram1_0_seg : org = 0x40100000, len = CONFIG_SOC_IRAM_SIZE
|
||||
/* Even though the segment name is iram, it is actually mapped to flash and mapped constant data */
|
||||
iram0_2_seg (RX) : org = 0x40200010 + APP_OFFSET,
|
||||
len = APP_SIZE - 0x10
|
||||
|
||||
/* It is actually mapped to flash. */
|
||||
irom0_0_seg : org = 0x40200010 + APP_OFFSET, len = APP_SIZE - 0x10
|
||||
/*
|
||||
(0x18 offset above is a convenience for the app binary image generation. The .bin file which is flashed
|
||||
to the chip has a 0x10 byte file header. Setting this offset makes it simple to meet the flash cache.)
|
||||
*/
|
||||
|
||||
/* RTC memory, persists over deep sleep. */
|
||||
rtc_seg : org = 0x60001200, len = 0x200
|
||||
|
||||
/* Length of this section is 96KB */
|
||||
dram0_0_seg (RW) : org = 0x3FFE8000, len = 0x18000
|
||||
|
||||
/* (See iram0_2_seg for meaning of 0x10 offset in the above.) */
|
||||
|
||||
/* RTC memory. Persists over deep sleep */
|
||||
rtc_data_seg(RW) : org = 0x60001200, len = 0x200
|
||||
}
|
||||
|
247
components/esp8266/ld/esp8266.project.ld.in
Normal file
247
components/esp8266/ld/esp8266.project.ld.in
Normal file
@ -0,0 +1,247 @@
|
||||
/* Default entry point: */
|
||||
ENTRY(call_start_cpu);
|
||||
|
||||
SECTIONS
|
||||
{
|
||||
/* RTC data section holds RTC wake data/rodata
|
||||
marked with RTC_DATA_ATTR, RTC_RODATA_ATTR attributes.
|
||||
*/
|
||||
.rtc.data :
|
||||
{
|
||||
_rtc_data_start = ABSOLUTE(.);
|
||||
|
||||
mapping[rtc_data]
|
||||
|
||||
_rtc_data_end = ABSOLUTE(.);
|
||||
} > rtc_data_seg
|
||||
|
||||
/* RTC bss */
|
||||
.rtc.bss (NOLOAD) :
|
||||
{
|
||||
_rtc_bss_start = ABSOLUTE(.);
|
||||
|
||||
mapping[rtc_bss]
|
||||
|
||||
_rtc_bss_end = ABSOLUTE(.);
|
||||
} > rtc_data_seg
|
||||
|
||||
/* This section holds data that should not be initialized at power up
|
||||
and will be retained during deep sleep.
|
||||
User data marked with RTC_NOINIT_ATTR will be placed
|
||||
into this section. See the file "esp_attr.h" for more information.
|
||||
*/
|
||||
.rtc_noinit (NOLOAD):
|
||||
{
|
||||
. = ALIGN(4);
|
||||
_rtc_noinit_start = ABSOLUTE(.);
|
||||
*(.rtc_noinit .rtc_noinit.*)
|
||||
. = ALIGN(4) ;
|
||||
_rtc_noinit_end = ABSOLUTE(.);
|
||||
} > rtc_data_seg
|
||||
|
||||
ASSERT(((_rtc_noinit_end - ORIGIN(rtc_data_seg)) <= LENGTH(rtc_data_seg)),
|
||||
"RTC segment data does not fit.")
|
||||
|
||||
/* Send .iram0 code to iram */
|
||||
.iram0.vectors :
|
||||
{
|
||||
_iram_start = ABSOLUTE(.);
|
||||
/* Vectors go to IRAM */
|
||||
_init_start = ABSOLUTE(.);
|
||||
LONG(_iram_start)
|
||||
. = 0x10;
|
||||
KEEP(*(.DebugExceptionVector.text));
|
||||
. = 0x20;
|
||||
KEEP(*(.NMIExceptionVector.text));
|
||||
. = 0x30;
|
||||
KEEP(*(.KernelExceptionVector.text));
|
||||
. = 0x50;
|
||||
KEEP(*(.UserExceptionVector.text));
|
||||
. = 0x70;
|
||||
KEEP(*(.DoubleExceptionVector.text));
|
||||
|
||||
*(.text .literal)
|
||||
|
||||
*(.*Vector.literal)
|
||||
|
||||
*(.UserEnter.literal);
|
||||
*(.UserEnter.text);
|
||||
. = ALIGN (16);
|
||||
*(.entry.text)
|
||||
*(.init.literal)
|
||||
*(.init)
|
||||
_init_end = ABSOLUTE(.);
|
||||
} > iram0_0_seg
|
||||
|
||||
.iram0.text :
|
||||
{
|
||||
/* Code marked as runnning out of IRAM */
|
||||
_iram_text_start = ABSOLUTE(.);
|
||||
|
||||
mapping[iram0_text]
|
||||
|
||||
_iram_text_end = ABSOLUTE(.);
|
||||
_iram_end = ABSOLUTE(.);
|
||||
} > iram0_0_seg
|
||||
|
||||
ASSERT(((_iram_text_end - ORIGIN(iram0_0_seg)) <= LENGTH(iram0_0_seg)),
|
||||
"IRAM0 segment data does not fit.")
|
||||
|
||||
.dram0.data :
|
||||
{
|
||||
_data_start = ABSOLUTE(.);
|
||||
*(.gnu.linkonce.d.*)
|
||||
*(.data1)
|
||||
*(.sdata)
|
||||
*(.sdata.*)
|
||||
*(.gnu.linkonce.s.*)
|
||||
*(.sdata2)
|
||||
*(.sdata2.*)
|
||||
*(.gnu.linkonce.s2.*)
|
||||
*(.jcr)
|
||||
*(.dram0 .dram0.*)
|
||||
|
||||
mapping[dram0_data]
|
||||
|
||||
_data_end = ABSOLUTE(.);
|
||||
. = ALIGN(4);
|
||||
} > dram0_0_seg
|
||||
|
||||
/*This section holds data that should not be initialized at power up.
|
||||
The section located in Internal SRAM memory region. The macro _NOINIT
|
||||
can be used as attribute to place data into this section.
|
||||
See the esp_attr.h file for more information.
|
||||
*/
|
||||
.noinit (NOLOAD):
|
||||
{
|
||||
. = ALIGN(4);
|
||||
_noinit_start = ABSOLUTE(.);
|
||||
*(.noinit .noinit.*)
|
||||
. = ALIGN(4) ;
|
||||
_noinit_end = ABSOLUTE(.);
|
||||
} > dram0_0_seg
|
||||
|
||||
/* Shared RAM */
|
||||
.dram0.bss (NOLOAD) :
|
||||
{
|
||||
. = ALIGN (8);
|
||||
_bss_start = ABSOLUTE(.);
|
||||
|
||||
mapping[dram0_bss]
|
||||
|
||||
*(.dynsbss)
|
||||
*(.sbss)
|
||||
*(.sbss.*)
|
||||
*(.gnu.linkonce.sb.*)
|
||||
*(.scommon)
|
||||
*(.sbss2)
|
||||
*(.sbss2.*)
|
||||
*(.gnu.linkonce.sb2.*)
|
||||
*(.dynbss)
|
||||
*(.share.mem)
|
||||
*(.gnu.linkonce.b.*)
|
||||
|
||||
. = ALIGN (8);
|
||||
_bss_end = ABSOLUTE(.);
|
||||
} > dram0_0_seg
|
||||
|
||||
ASSERT(((_bss_end - ORIGIN(dram0_0_seg)) <= LENGTH(dram0_0_seg)),
|
||||
"DRAM segment data does not fit.")
|
||||
|
||||
.flash.text :
|
||||
{
|
||||
_stext = .;
|
||||
_text_start = ABSOLUTE(.);
|
||||
|
||||
mapping[flash_text]
|
||||
|
||||
/* For ESP8266 library function */
|
||||
*(.irom0.literal .irom0.text)
|
||||
*(.irom.literal .irom.text .irom.text.literal)
|
||||
*(.text2 .text2.* .literal2 .literal2.*)
|
||||
|
||||
*(.stub .gnu.warning .gnu.linkonce.literal.* .gnu.linkonce.t.*.literal .gnu.linkonce.t.*)
|
||||
*(.irom0.text) /* catch stray ICACHE_RODATA_ATTR */
|
||||
*(.fini.literal)
|
||||
*(.fini)
|
||||
*(.gnu.version)
|
||||
_text_end = ABSOLUTE(.);
|
||||
_etext = .;
|
||||
|
||||
/* Similar to _iram_start, this symbol goes here so it is
|
||||
resolved by addr2line in preference to the first symbol in
|
||||
the flash.text segment.
|
||||
*/
|
||||
_flash_cache_start = ABSOLUTE(0);
|
||||
} >iram0_2_seg
|
||||
|
||||
.flash.rodata ALIGN(1) :
|
||||
{
|
||||
_rodata_start = ABSOLUTE(.);
|
||||
|
||||
/**
|
||||
Insert 8 bytes data to make realy rodata section's link address offset to be 0x8,
|
||||
esptool will remove these data and add real segment header
|
||||
*/
|
||||
. = 0x8;
|
||||
|
||||
*(.rodata_desc .rodata_desc.*) /* Should be the first. App version info. DO NOT PUT ANYTHING BEFORE IT! */
|
||||
*(.rodata_custom_desc .rodata_custom_desc.*) /* Should be the second. Custom app version info. DO NOT PUT ANYTHING BEFORE IT! */
|
||||
|
||||
*(.rodata2 .rodata2.*) /* For ESP8266 library function */
|
||||
|
||||
mapping[flash_rodata]
|
||||
|
||||
*(.irom1.text) /* catch stray ICACHE_RODATA_ATTR */
|
||||
*(.gnu.linkonce.r.*)
|
||||
*(.rodata1)
|
||||
__XT_EXCEPTION_TABLE_ = ABSOLUTE(.);
|
||||
*(.xt_except_table)
|
||||
*(.gcc_except_table .gcc_except_table.*)
|
||||
*(.gnu.linkonce.e.*)
|
||||
*(.gnu.version_r)
|
||||
. = (. + 3) & ~ 3;
|
||||
__eh_frame = ABSOLUTE(.);
|
||||
KEEP(*(.eh_frame))
|
||||
. = (. + 7) & ~ 3;
|
||||
/* C++ constructor and destructor tables
|
||||
|
||||
Make a point of not including anything from crtbegin.o or crtend.o, as IDF doesn't use toolchain crt
|
||||
*/
|
||||
__init_array_start = ABSOLUTE(.);
|
||||
KEEP (*(EXCLUDE_FILE (*crtend.* *crtbegin.*) .ctors .ctors.*))
|
||||
__init_array_end = ABSOLUTE(.);
|
||||
KEEP (*crtbegin.*(.dtors))
|
||||
KEEP (*(EXCLUDE_FILE (*crtend.*) .dtors))
|
||||
KEEP (*(SORT(.dtors.*)))
|
||||
KEEP (*(.dtors))
|
||||
/* C++ exception handlers table: */
|
||||
__XT_EXCEPTION_DESCS_ = ABSOLUTE(.);
|
||||
*(.xt_except_desc)
|
||||
*(.gnu.linkonce.h.*)
|
||||
__XT_EXCEPTION_DESCS_END__ = ABSOLUTE(.);
|
||||
*(.xt_except_desc_end)
|
||||
*(.dynamic)
|
||||
*(.gnu.version_d)
|
||||
/* Addresses of memory regions reserved via
|
||||
SOC_RESERVE_MEMORY_REGION() */
|
||||
soc_reserved_memory_region_start = ABSOLUTE(.);
|
||||
KEEP (*(.reserved_memory_address))
|
||||
soc_reserved_memory_region_end = ABSOLUTE(.);
|
||||
_rodata_end = ABSOLUTE(.);
|
||||
/* Literals are also RO data. */
|
||||
_lit4_start = ABSOLUTE(.);
|
||||
*(*.lit4)
|
||||
*(.lit4.*)
|
||||
*(.gnu.linkonce.lit4.*)
|
||||
_lit4_end = ABSOLUTE(.);
|
||||
. = ALIGN(4);
|
||||
_thread_local_start = ABSOLUTE(.);
|
||||
*(.tdata)
|
||||
*(.tdata.*)
|
||||
*(.tbss)
|
||||
*(.tbss.*)
|
||||
_thread_local_end = ABSOLUTE(.);
|
||||
. = ALIGN(4);
|
||||
} >iram0_2_seg
|
||||
}
|
89
components/esp8266/ld/esp8266_fragments.lf
Normal file
89
components/esp8266/ld/esp8266_fragments.lf
Normal file
@ -0,0 +1,89 @@
|
||||
[sections:text]
|
||||
entries:
|
||||
.text+
|
||||
.literal+
|
||||
|
||||
[sections:data]
|
||||
entries:
|
||||
.data+
|
||||
|
||||
[sections:bss]
|
||||
entries:
|
||||
.bss+
|
||||
|
||||
[sections:common]
|
||||
entries:
|
||||
COMMON
|
||||
|
||||
[sections:rodata]
|
||||
entries:
|
||||
.rodata+
|
||||
|
||||
[sections:rtc_text]
|
||||
entries:
|
||||
.rtc.text+
|
||||
.rtc.literal
|
||||
|
||||
[sections:rtc_data]
|
||||
entries:
|
||||
.rtc.data+
|
||||
|
||||
[sections:rtc_rodata]
|
||||
entries:
|
||||
.rtc.rodata+
|
||||
|
||||
[sections:rtc_bss]
|
||||
entries:
|
||||
.rtc.bss
|
||||
|
||||
[sections:iram]
|
||||
entries:
|
||||
.iram1+
|
||||
|
||||
[sections:dram]
|
||||
entries:
|
||||
.dram1+
|
||||
|
||||
[sections:wifi_iram]
|
||||
entries:
|
||||
.wifi0iram+
|
||||
|
||||
[scheme:default]
|
||||
entries:
|
||||
text -> flash_text
|
||||
rodata -> flash_rodata
|
||||
data -> dram0_data
|
||||
bss -> dram0_bss
|
||||
common -> dram0_bss
|
||||
iram -> iram0_text
|
||||
dram -> dram0_data
|
||||
rtc_text -> rtc_text
|
||||
rtc_data -> rtc_data
|
||||
rtc_rodata -> rtc_data
|
||||
rtc_bss -> rtc_bss
|
||||
wifi_iram -> flash_text
|
||||
|
||||
[scheme:rtc]
|
||||
entries:
|
||||
text -> rtc_text
|
||||
data -> rtc_data
|
||||
rodata -> rtc_data
|
||||
bss -> rtc_bss
|
||||
common -> rtc_bss
|
||||
|
||||
[scheme:noflash]
|
||||
entries:
|
||||
text -> iram0_text
|
||||
rodata -> dram0_data
|
||||
|
||||
[scheme:noflash_data]
|
||||
entries:
|
||||
rodata -> dram0_data
|
||||
|
||||
[scheme:noflash_text]
|
||||
entries:
|
||||
text -> iram0_text
|
||||
|
||||
[scheme:wifi_iram]
|
||||
entries:
|
||||
wifi_iram -> iram0_text
|
9
components/esp8266/linker.lf
Normal file
9
components/esp8266/linker.lf
Normal file
@ -0,0 +1,9 @@
|
||||
[mapping:pp]
|
||||
archive: libpp.a
|
||||
entries:
|
||||
* (noflash_text)
|
||||
|
||||
[mapping:phy]
|
||||
archive: libphy.a
|
||||
entries:
|
||||
* (noflash_text)
|
@ -55,6 +55,9 @@ static inline int should_load(uint32_t load_addr)
|
||||
return 0;
|
||||
}
|
||||
|
||||
if (IS_FLASH(load_addr))
|
||||
return 0;
|
||||
|
||||
return 1;
|
||||
}
|
||||
|
||||
@ -104,7 +107,7 @@ static void user_init_entry(void *param)
|
||||
vTaskDelete(NULL);
|
||||
}
|
||||
|
||||
void call_user_start(size_t start_addr)
|
||||
void call_start_cpu(size_t start_addr)
|
||||
{
|
||||
int i;
|
||||
int *p;
|
||||
|
@ -1543,8 +1543,8 @@ class ESP8266V3FirmwareImage(BaseFirmwareImage):
|
||||
checksum = ESPLoader.ESP_CHECKSUM_MAGIC
|
||||
|
||||
# split segments into flash-mapped vs ram-loaded, and take copies so we can mutate them
|
||||
flash_segments = [copy.deepcopy(s) for s in sorted(self.segments, key=lambda s:s.addr) if self.is_flash_addr(s.addr)]
|
||||
ram_segments = [copy.deepcopy(s) for s in sorted(self.segments, key=lambda s:s.addr) if not self.is_flash_addr(s.addr)]
|
||||
flash_segments = [copy.deepcopy(s) for s in sorted(self.segments, key=lambda s:s.addr) if self.is_flash_addr(s.addr) and len(s.data)]
|
||||
ram_segments = [copy.deepcopy(s) for s in sorted(self.segments, key=lambda s:s.addr) if not self.is_flash_addr(s.addr) and len(s.data)]
|
||||
|
||||
IROM_ALIGN = 65536
|
||||
|
||||
@ -1559,6 +1559,7 @@ class ESP8266V3FirmwareImage(BaseFirmwareImage):
|
||||
#print('%x' % last_addr)
|
||||
for segment in flash_segments[1:]:
|
||||
if segment.addr // IROM_ALIGN == last_addr // IROM_ALIGN:
|
||||
print(segment)
|
||||
raise FatalError(("Segment loaded at 0x%08x lands in same 64KB flash mapping as segment loaded at 0x%08x. " +
|
||||
"Can't generate binary. Suggest changing linker script or ELF to merge sections.") %
|
||||
(segment.addr, last_addr))
|
||||
@ -1599,6 +1600,9 @@ class ESP8266V3FirmwareImage(BaseFirmwareImage):
|
||||
checksum = self.save_segment(f, pad_segment, checksum)
|
||||
total_segments += 1
|
||||
else:
|
||||
# remove 8 bytes empty data for insert segment header
|
||||
if segment.name == '.flash.rodata':
|
||||
segment.data = segment.data[8:]
|
||||
# write the flash segment
|
||||
#assert (f.tell() + 8) % IROM_ALIGN == segment.addr % IROM_ALIGN
|
||||
checksum = self.save_segment(f, segment, checksum)
|
||||
|
@ -21,18 +21,18 @@ heap_region_t g_heap_region[HEAP_REGIONS_MAX];
|
||||
*/
|
||||
void heap_caps_init(void)
|
||||
{
|
||||
extern char _heap_start;
|
||||
extern char _bss_end;
|
||||
|
||||
#ifndef CONFIG_SOC_FULL_ICACHE
|
||||
extern char _lit4_end;
|
||||
extern char _iram_end;
|
||||
|
||||
g_heap_region[0].start_addr = (uint8_t *)&_lit4_end;
|
||||
g_heap_region[0].total_size = ((size_t)(0x4010C000 - (uint32_t)&_lit4_end));
|
||||
g_heap_region[0].start_addr = (uint8_t *)&_iram_end;
|
||||
g_heap_region[0].total_size = ((size_t)(0x4010C000 - (uint32_t)&_iram_end));
|
||||
g_heap_region[0].caps = MALLOC_CAP_32BIT;
|
||||
#endif
|
||||
|
||||
g_heap_region[HEAP_REGIONS_MAX - 1].start_addr = (uint8_t *)&_heap_start;
|
||||
g_heap_region[HEAP_REGIONS_MAX - 1].total_size = ((size_t)(0x40000000 - (uint32_t)&_heap_start));
|
||||
g_heap_region[HEAP_REGIONS_MAX - 1].start_addr = (uint8_t *)&_bss_end;
|
||||
g_heap_region[HEAP_REGIONS_MAX - 1].total_size = ((size_t)(0x40000000 - (uint32_t)&_bss_end));
|
||||
g_heap_region[HEAP_REGIONS_MAX - 1].caps = MALLOC_CAP_8BIT | MALLOC_CAP_32BIT | MALLOC_CAP_DMA;
|
||||
|
||||
esp_heap_caps_init_region(g_heap_region, HEAP_REGIONS_MAX);
|
||||
|
@ -1,4 +1,3 @@
|
||||
set(COMPONENT_SRCDIRS ".")
|
||||
set(COMPONENT_ADD_INCLUDEDIRS "include")
|
||||
set(COMPONENT_REQUIRES)
|
||||
register_component()
|
||||
idf_component_register(SRCS "log.c"
|
||||
INCLUDE_DIRS "include"
|
||||
LDFRAGMENTS "linker.lf")
|
||||
|
@ -1,5 +1,2 @@
|
||||
#
|
||||
# Component Makefile
|
||||
#
|
||||
# (Uses default behaviour of compiling all source files in directory, adding 'include' to include path.)
|
||||
|
||||
COMPONENT_ADD_LDFRAGMENTS += linker.lf
|
||||
|
4
components/log/linker.lf
Normal file
4
components/log/linker.lf
Normal file
@ -0,0 +1,4 @@
|
||||
[mapping:log]
|
||||
archive: liblog.a
|
||||
entries:
|
||||
* (noflash_data)
|
@ -1,14 +1,14 @@
|
||||
set(srcs "src/partition"
|
||||
"src/spi_flash_raw.c"
|
||||
"src/spi_flash.c")
|
||||
if(BOOTLOADER_BUILD)
|
||||
# Bootloader needs SPIUnlock from this file, but doesn't
|
||||
# need other parts of this component
|
||||
set(COMPONENT_SRCDIRS "src" "port")
|
||||
set(COMPONENT_PRIV_REQUIRES "bootloader_support")
|
||||
set(srcs "${srcs}" "port/port.c")
|
||||
set(priv_requires "bootloader_support")
|
||||
else()
|
||||
set(COMPONENT_SRCDIRS "src" "port")
|
||||
set(COMPONENT_PRIV_REQUIRES "esp8266" "freertos" "bootloader_support")
|
||||
set(priv_requires "esp8266" "freertos" "bootloader_support")
|
||||
endif()
|
||||
|
||||
set(COMPONENT_ADD_INCLUDEDIRS include)
|
||||
set(COMPONENT_REQUIRES)
|
||||
|
||||
register_component()
|
||||
idf_component_register(SRCS "${srcs}"
|
||||
PRIV_REQUIRES "${priv_requires}"
|
||||
INCLUDE_DIRS "include"
|
||||
LDFRAGMENTS "linker.lf")
|
||||
|
@ -8,6 +8,8 @@ ifdef IS_BOOTLOADER_BUILD
|
||||
COMPONENT_OBJS := src/spi_flash.o src/spi_flash_raw.o
|
||||
endif
|
||||
|
||||
COMPONENT_ADD_LDFRAGMENTS += linker.lf
|
||||
|
||||
CFLAGS += -DPARTITION_QUEUE_HEADER=\"sys/queue.h\"
|
||||
|
||||
ifdef IS_BOOTLOADER_BUILD
|
||||
|
4
components/spi_flash/linker.lf
Normal file
4
components/spi_flash/linker.lf
Normal file
@ -0,0 +1,4 @@
|
||||
[mapping:spi_flash]
|
||||
archive: libspi_flash.a
|
||||
entries:
|
||||
spi_flash_raw (noflash)
|
0
tools/ldgen/__init__.py
Normal file
0
tools/ldgen/__init__.py
Normal file
425
tools/ldgen/fragments.py
Normal file
425
tools/ldgen/fragments.py
Normal file
@ -0,0 +1,425 @@
|
||||
#
|
||||
# Copyright 2018-2019 Espressif Systems (Shanghai) PTE LTD
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
import os
|
||||
import re
|
||||
|
||||
from sdkconfig import SDKConfig
|
||||
from pyparsing import OneOrMore
|
||||
from pyparsing import restOfLine
|
||||
from pyparsing import alphanums
|
||||
from pyparsing import Word
|
||||
from pyparsing import alphas
|
||||
from pyparsing import ParseFatalException
|
||||
from pyparsing import Suppress
|
||||
from pyparsing import Group
|
||||
from pyparsing import Literal
|
||||
from pyparsing import ZeroOrMore
|
||||
from pyparsing import Optional
|
||||
from pyparsing import originalTextFor
|
||||
from pyparsing import Forward
|
||||
from pyparsing import indentedBlock
|
||||
from collections import namedtuple
|
||||
import abc
|
||||
|
||||
|
||||
KeyGrammar = namedtuple("KeyGrammar", "grammar min max required")
|
||||
|
||||
|
||||
class FragmentFile():
|
||||
"""
|
||||
Fragment file internal representation. Parses and stores instances of the fragment definitions
|
||||
contained within the file.
|
||||
"""
|
||||
|
||||
def __init__(self, fragment_file, sdkconfig):
|
||||
try:
|
||||
fragment_file = open(fragment_file, "r")
|
||||
except TypeError:
|
||||
pass
|
||||
|
||||
path = os.path.realpath(fragment_file.name)
|
||||
|
||||
indent_stack = [1]
|
||||
|
||||
class parse_ctx:
|
||||
fragment = None # current fragment
|
||||
key = "" # current key
|
||||
keys = list() # list of keys parsed
|
||||
key_grammar = None # current key grammar
|
||||
|
||||
@staticmethod
|
||||
def reset():
|
||||
parse_ctx.fragment_instance = None
|
||||
parse_ctx.key = ""
|
||||
parse_ctx.keys = list()
|
||||
parse_ctx.key_grammar = None
|
||||
|
||||
def fragment_type_parse_action(toks):
|
||||
parse_ctx.reset()
|
||||
parse_ctx.fragment = FRAGMENT_TYPES[toks[0]]() # create instance of the fragment
|
||||
return None
|
||||
|
||||
def expand_conditionals(toks, stmts):
|
||||
try:
|
||||
stmt = toks["value"]
|
||||
stmts.append(stmt)
|
||||
except KeyError:
|
||||
try:
|
||||
conditions = toks["conditional"]
|
||||
for condition in conditions:
|
||||
try:
|
||||
_toks = condition[1]
|
||||
_cond = condition[0]
|
||||
if sdkconfig.evaluate_expression(_cond):
|
||||
expand_conditionals(_toks, stmts)
|
||||
break
|
||||
except IndexError:
|
||||
expand_conditionals(condition[0], stmts)
|
||||
except KeyError:
|
||||
for tok in toks:
|
||||
expand_conditionals(tok, stmts)
|
||||
|
||||
def key_body_parsed(pstr, loc, toks):
|
||||
stmts = list()
|
||||
expand_conditionals(toks, stmts)
|
||||
|
||||
if parse_ctx.key_grammar.min and len(stmts) < parse_ctx.key_grammar.min:
|
||||
raise ParseFatalException(pstr, loc, "fragment requires at least %d values for key '%s'" %
|
||||
(parse_ctx.key_grammar.min, parse_ctx.key))
|
||||
|
||||
if parse_ctx.key_grammar.max and len(stmts) > parse_ctx.key_grammar.max:
|
||||
raise ParseFatalException(pstr, loc, "fragment requires at most %d values for key '%s'" %
|
||||
(parse_ctx.key_grammar.max, parse_ctx.key))
|
||||
|
||||
try:
|
||||
parse_ctx.fragment.set_key_value(parse_ctx.key, stmts)
|
||||
except Exception as e:
|
||||
raise ParseFatalException(pstr, loc, "unable to add key '%s'; %s" % (parse_ctx.key, e.message))
|
||||
return None
|
||||
|
||||
key = Word(alphanums + "_") + Suppress(":")
|
||||
key_stmt = Forward()
|
||||
|
||||
condition_block = indentedBlock(key_stmt, indent_stack)
|
||||
key_stmts = OneOrMore(condition_block)
|
||||
key_body = Suppress(key) + key_stmts
|
||||
key_body.setParseAction(key_body_parsed)
|
||||
|
||||
condition = originalTextFor(SDKConfig.get_expression_grammar()).setResultsName("condition")
|
||||
if_condition = Group(Suppress("if") + condition + Suppress(":") + condition_block)
|
||||
elif_condition = Group(Suppress("elif") + condition + Suppress(":") + condition_block)
|
||||
else_condition = Group(Suppress("else") + Suppress(":") + condition_block)
|
||||
conditional = (if_condition + Optional(OneOrMore(elif_condition)) + Optional(else_condition)).setResultsName("conditional")
|
||||
|
||||
def key_parse_action(pstr, loc, toks):
|
||||
key = toks[0]
|
||||
|
||||
if key in parse_ctx.keys:
|
||||
raise ParseFatalException(pstr, loc, "duplicate key '%s' value definition" % parse_ctx.key)
|
||||
|
||||
parse_ctx.key = key
|
||||
parse_ctx.keys.append(key)
|
||||
|
||||
try:
|
||||
parse_ctx.key_grammar = parse_ctx.fragment.get_key_grammars()[key]
|
||||
key_grammar = parse_ctx.key_grammar.grammar
|
||||
except KeyError:
|
||||
raise ParseFatalException(pstr, loc, "key '%s' is not supported by fragment" % key)
|
||||
except Exception as e:
|
||||
raise ParseFatalException(pstr, loc, "unable to parse key '%s'; %s" % (key, e.message))
|
||||
|
||||
key_stmt << (conditional | Group(key_grammar).setResultsName("value"))
|
||||
|
||||
return None
|
||||
|
||||
def name_parse_action(pstr, loc, toks):
|
||||
parse_ctx.fragment.name = toks[0]
|
||||
|
||||
key.setParseAction(key_parse_action)
|
||||
|
||||
ftype = Word(alphas).setParseAction(fragment_type_parse_action)
|
||||
fid = Suppress(":") + Word(alphanums + "_.").setResultsName("name")
|
||||
fid.setParseAction(name_parse_action)
|
||||
header = Suppress("[") + ftype + fid + Suppress("]")
|
||||
|
||||
def fragment_parse_action(pstr, loc, toks):
|
||||
key_grammars = parse_ctx.fragment.get_key_grammars()
|
||||
required_keys = set([k for (k,v) in key_grammars.items() if v.required])
|
||||
present_keys = required_keys.intersection(set(parse_ctx.keys))
|
||||
if present_keys != required_keys:
|
||||
raise ParseFatalException(pstr, loc, "required keys %s for fragment not found" %
|
||||
list(required_keys - present_keys))
|
||||
return parse_ctx.fragment
|
||||
|
||||
fragment_stmt = Forward()
|
||||
fragment_block = indentedBlock(fragment_stmt, indent_stack)
|
||||
|
||||
fragment_if_condition = Group(Suppress("if") + condition + Suppress(":") + fragment_block)
|
||||
fragment_elif_condition = Group(Suppress("elif") + condition + Suppress(":") + fragment_block)
|
||||
fragment_else_condition = Group(Suppress("else") + Suppress(":") + fragment_block)
|
||||
fragment_conditional = (fragment_if_condition + Optional(OneOrMore(fragment_elif_condition)) +
|
||||
Optional(fragment_else_condition)).setResultsName("conditional")
|
||||
|
||||
fragment = (header + OneOrMore(indentedBlock(key_body, indent_stack, False))).setResultsName("value")
|
||||
fragment.setParseAction(fragment_parse_action)
|
||||
fragment.ignore("#" + restOfLine)
|
||||
|
||||
deprecated_mapping = DeprecatedMapping.get_fragment_grammar(sdkconfig, fragment_file.name).setResultsName("value")
|
||||
|
||||
fragment_stmt << (Group(deprecated_mapping) | Group(fragment) | Group(fragment_conditional))
|
||||
|
||||
def fragment_stmt_parsed(pstr, loc, toks):
|
||||
stmts = list()
|
||||
expand_conditionals(toks, stmts)
|
||||
return stmts
|
||||
|
||||
parser = ZeroOrMore(fragment_stmt)
|
||||
parser.setParseAction(fragment_stmt_parsed)
|
||||
|
||||
self.fragments = parser.parseFile(fragment_file, parseAll=True)
|
||||
|
||||
for fragment in self.fragments:
|
||||
fragment.path = path
|
||||
|
||||
|
||||
class Fragment():
|
||||
__metaclass__ = abc.ABCMeta
|
||||
"""
|
||||
Encapsulates a fragment as defined in the generator syntax. Sets values common to all fragment and performs processing
|
||||
such as checking the validity of the fragment name and getting the entry values.
|
||||
"""
|
||||
|
||||
IDENTIFIER = Word(alphas + "_", alphanums + "_")
|
||||
ENTITY = Word(alphanums + ".-_$")
|
||||
|
||||
@abc.abstractmethod
|
||||
def set_key_value(self, key, parse_results):
|
||||
pass
|
||||
|
||||
@abc.abstractmethod
|
||||
def get_key_grammars(self):
|
||||
pass
|
||||
|
||||
|
||||
class Sections(Fragment):
|
||||
|
||||
grammars = {
|
||||
"entries": KeyGrammar(Word(alphanums + "+.").setResultsName("section"), 1, None, True)
|
||||
}
|
||||
|
||||
"""
|
||||
Utility function that returns a list of sections given a sections fragment entry,
|
||||
with the '+' notation and symbol concatenation handled automatically.
|
||||
"""
|
||||
@staticmethod
|
||||
def get_section_data_from_entry(sections_entry, symbol=None):
|
||||
if not symbol:
|
||||
sections = list()
|
||||
sections.append(sections_entry.replace("+", ""))
|
||||
sections.append(sections_entry.replace("+", ".*"))
|
||||
return sections
|
||||
else:
|
||||
if sections_entry.endswith("+"):
|
||||
section = sections_entry.replace("+", ".*")
|
||||
expansion = section.replace(".*", "." + symbol)
|
||||
return (section, expansion)
|
||||
else:
|
||||
return (sections_entry, None)
|
||||
|
||||
def set_key_value(self, key, parse_results):
|
||||
if key == "entries":
|
||||
self.entries = set()
|
||||
for result in parse_results:
|
||||
self.entries.add(result["section"])
|
||||
|
||||
def get_key_grammars(self):
|
||||
return self.__class__.grammars
|
||||
|
||||
|
||||
class Scheme(Fragment):
|
||||
"""
|
||||
Encapsulates a scheme fragment, which defines what target input sections are placed under.
|
||||
"""
|
||||
|
||||
grammars = {
|
||||
"entries": KeyGrammar(Fragment.IDENTIFIER.setResultsName("sections") + Suppress("->") +
|
||||
Fragment.IDENTIFIER.setResultsName("target"), 1, None, True)
|
||||
}
|
||||
|
||||
def set_key_value(self, key, parse_results):
|
||||
if key == "entries":
|
||||
self.entries = set()
|
||||
for result in parse_results:
|
||||
self.entries.add((result["sections"], result["target"]))
|
||||
|
||||
def get_key_grammars(self):
|
||||
return self.__class__.grammars
|
||||
|
||||
|
||||
class Mapping(Fragment):
|
||||
"""
|
||||
Encapsulates a mapping fragment, which defines what targets the input sections of mappable entties are placed under.
|
||||
"""
|
||||
|
||||
MAPPING_ALL_OBJECTS = "*"
|
||||
|
||||
def __init__(self):
|
||||
Fragment.__init__(self)
|
||||
self.entries = set()
|
||||
self.deprecated = False
|
||||
|
||||
def set_key_value(self, key, parse_results):
|
||||
if key == "archive":
|
||||
self.archive = parse_results[0]["archive"]
|
||||
elif key == "entries":
|
||||
for result in parse_results:
|
||||
obj = None
|
||||
symbol = None
|
||||
scheme = None
|
||||
|
||||
try:
|
||||
obj = result["object"]
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
try:
|
||||
symbol = result["symbol"]
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
try:
|
||||
scheme = result["scheme"]
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
self.entries.add((obj, symbol, scheme))
|
||||
|
||||
def get_key_grammars(self):
|
||||
# There are three possible patterns for mapping entries:
|
||||
# obj:symbol (scheme)
|
||||
# obj (scheme)
|
||||
# * (scheme)
|
||||
obj = Fragment.ENTITY.setResultsName("object")
|
||||
symbol = Suppress(":") + Fragment.IDENTIFIER.setResultsName("symbol")
|
||||
scheme = Suppress("(") + Fragment.IDENTIFIER.setResultsName("scheme") + Suppress(")")
|
||||
|
||||
pattern1 = obj + symbol + scheme
|
||||
pattern2 = obj + scheme
|
||||
pattern3 = Literal(Mapping.MAPPING_ALL_OBJECTS).setResultsName("object") + scheme
|
||||
|
||||
entry = pattern1 | pattern2 | pattern3
|
||||
|
||||
grammars = {
|
||||
"archive": KeyGrammar(Fragment.ENTITY.setResultsName("archive"), 1, 1, True),
|
||||
"entries": KeyGrammar(entry, 0, None, True)
|
||||
}
|
||||
|
||||
return grammars
|
||||
|
||||
|
||||
class DeprecatedMapping():
|
||||
"""
|
||||
Encapsulates a mapping fragment, which defines what targets the input sections of mappable entties are placed under.
|
||||
"""
|
||||
|
||||
# Name of the default condition entry
|
||||
DEFAULT_CONDITION = "default"
|
||||
MAPPING_ALL_OBJECTS = "*"
|
||||
|
||||
@staticmethod
|
||||
def get_fragment_grammar(sdkconfig, fragment_file):
|
||||
|
||||
# Match header [mapping]
|
||||
header = Suppress("[") + Suppress("mapping") + Suppress("]")
|
||||
|
||||
# There are three possible patterns for mapping entries:
|
||||
# obj:symbol (scheme)
|
||||
# obj (scheme)
|
||||
# * (scheme)
|
||||
obj = Fragment.ENTITY.setResultsName("object")
|
||||
symbol = Suppress(":") + Fragment.IDENTIFIER.setResultsName("symbol")
|
||||
scheme = Suppress("(") + Fragment.IDENTIFIER.setResultsName("scheme") + Suppress(")")
|
||||
|
||||
pattern1 = Group(obj + symbol + scheme)
|
||||
pattern2 = Group(obj + scheme)
|
||||
pattern3 = Group(Literal(Mapping.MAPPING_ALL_OBJECTS).setResultsName("object") + scheme)
|
||||
|
||||
mapping_entry = pattern1 | pattern2 | pattern3
|
||||
|
||||
# To simplify parsing, classify groups of condition-mapping entry into two types: normal and default
|
||||
# A normal grouping is one with a non-default condition. The default grouping is one which contains the
|
||||
# default condition
|
||||
mapping_entries = Group(ZeroOrMore(mapping_entry)).setResultsName("mappings")
|
||||
|
||||
normal_condition = Suppress(":") + originalTextFor(SDKConfig.get_expression_grammar())
|
||||
default_condition = Optional(Suppress(":") + Literal(DeprecatedMapping.DEFAULT_CONDITION))
|
||||
|
||||
normal_group = Group(normal_condition.setResultsName("condition") + mapping_entries)
|
||||
default_group = Group(default_condition + mapping_entries).setResultsName("default_group")
|
||||
|
||||
normal_groups = Group(ZeroOrMore(normal_group)).setResultsName("normal_groups")
|
||||
|
||||
# Any mapping fragment definition can have zero or more normal group and only one default group as a last entry.
|
||||
archive = Suppress("archive") + Suppress(":") + Fragment.ENTITY.setResultsName("archive")
|
||||
entries = Suppress("entries") + Suppress(":") + (normal_groups + default_group).setResultsName("entries")
|
||||
|
||||
mapping = Group(header + archive + entries)
|
||||
mapping.ignore("#" + restOfLine)
|
||||
|
||||
def parsed_deprecated_mapping(pstr, loc, toks):
|
||||
fragment = Mapping()
|
||||
fragment.archive = toks[0].archive
|
||||
fragment.name = re.sub(r"[^0-9a-zA-Z]+", "_", fragment.archive)
|
||||
fragment.deprecated = True
|
||||
|
||||
fragment.entries = set()
|
||||
condition_true = False
|
||||
for entries in toks[0].entries[0]:
|
||||
condition = next(iter(entries.condition.asList())).strip()
|
||||
condition_val = sdkconfig.evaluate_expression(condition)
|
||||
|
||||
if condition_val:
|
||||
for entry in entries[1]:
|
||||
fragment.entries.add((entry.object, None if entry.symbol == '' else entry.symbol, entry.scheme))
|
||||
condition_true = True
|
||||
break
|
||||
|
||||
if not fragment.entries and not condition_true:
|
||||
try:
|
||||
entries = toks[0].entries[1][1]
|
||||
except IndexError:
|
||||
entries = toks[0].entries[1][0]
|
||||
for entry in entries:
|
||||
fragment.entries.add((entry.object, None if entry.symbol == '' else entry.symbol, entry.scheme))
|
||||
|
||||
if not fragment.entries:
|
||||
fragment.entries.add(("*", None, "default"))
|
||||
|
||||
dep_warning = str(ParseFatalException(pstr, loc,
|
||||
"Warning: Deprecated old-style mapping fragment parsed in file %s." % fragment_file))
|
||||
|
||||
print(dep_warning)
|
||||
return fragment
|
||||
|
||||
mapping.setParseAction(parsed_deprecated_mapping)
|
||||
return mapping
|
||||
|
||||
|
||||
FRAGMENT_TYPES = {
|
||||
"sections": Sections,
|
||||
"scheme": Scheme,
|
||||
"mapping": Mapping
|
||||
}
|
636
tools/ldgen/generation.py
Normal file
636
tools/ldgen/generation.py
Normal file
@ -0,0 +1,636 @@
|
||||
#
|
||||
# Copyright 2018-2019 Espressif Systems (Shanghai) PTE LTD
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
|
||||
import collections
|
||||
import itertools
|
||||
import os
|
||||
import fnmatch
|
||||
|
||||
from fragments import Sections, Scheme, Mapping, Fragment
|
||||
from pyparsing import Suppress, White, ParseException, Literal, Group, ZeroOrMore
|
||||
from pyparsing import Word, OneOrMore, nums, alphanums, alphas, Optional, LineEnd, printables
|
||||
from ldgen_common import LdGenFailure
|
||||
|
||||
|
||||
class PlacementRule():
|
||||
"""
|
||||
Encapsulates a generated placement rule placed under a target
|
||||
"""
|
||||
|
||||
DEFAULT_SPECIFICITY = 0
|
||||
ARCHIVE_SPECIFICITY = 1
|
||||
OBJECT_SPECIFICITY = 2
|
||||
SYMBOL_SPECIFICITY = 3
|
||||
|
||||
class __container():
|
||||
def __init__(self, content):
|
||||
self.content = content
|
||||
|
||||
__metadata = collections.namedtuple("__metadata", "excludes expansions expanded")
|
||||
|
||||
def __init__(self, archive, obj, symbol, sections, target):
|
||||
if archive == "*":
|
||||
archive = None
|
||||
|
||||
if obj == "*":
|
||||
obj = None
|
||||
|
||||
self.archive = archive
|
||||
self.obj = obj
|
||||
self.symbol = symbol
|
||||
self.target = target
|
||||
self.sections = dict()
|
||||
|
||||
self.specificity = 0
|
||||
self.specificity += 1 if self.archive else 0
|
||||
self.specificity += 1 if (self.obj and not self.obj == '*') else 0
|
||||
self.specificity += 1 if self.symbol else 0
|
||||
|
||||
for section in sections:
|
||||
section_data = Sections.get_section_data_from_entry(section, self.symbol)
|
||||
|
||||
if not self.symbol:
|
||||
for s in section_data:
|
||||
metadata = self.__metadata(self.__container([]), self.__container([]), self.__container(False))
|
||||
self.sections[s] = metadata
|
||||
else:
|
||||
(section, expansion) = section_data
|
||||
if expansion:
|
||||
metadata = self.__metadata(self.__container([]), self.__container([expansion]), self.__container(True))
|
||||
self.sections[section] = metadata
|
||||
|
||||
def get_section_names(self):
|
||||
return self.sections.keys()
|
||||
|
||||
def add_exclusion(self, other, sections_infos=None):
|
||||
# Utility functions for this method
|
||||
def do_section_expansion(rule, section):
|
||||
if section in rule.get_section_names():
|
||||
sections_in_obj = sections_infos.get_obj_sections(rule.archive, rule.obj)
|
||||
|
||||
expansions = fnmatch.filter(sections_in_obj, section)
|
||||
return expansions
|
||||
|
||||
def remove_section_expansions(rule, section, expansions):
|
||||
existing_expansions = self.sections[section].expansions.content
|
||||
self.sections[section].expansions.content = [e for e in existing_expansions if e not in expansions]
|
||||
|
||||
# Exit immediately if the exclusion to be added is more general than this rule.
|
||||
if not other.is_more_specific_rule_of(self):
|
||||
return
|
||||
|
||||
for section in self.get_sections_intersection(other):
|
||||
if(other.specificity == PlacementRule.SYMBOL_SPECIFICITY):
|
||||
# If this sections has not been expanded previously, expand now and keep track.
|
||||
previously_expanded = self.sections[section].expanded.content
|
||||
if not previously_expanded:
|
||||
expansions = do_section_expansion(self, section)
|
||||
if expansions:
|
||||
self.sections[section].expansions.content = expansions
|
||||
self.sections[section].expanded.content = True
|
||||
previously_expanded = True
|
||||
|
||||
# Remove the sections corresponding to the symbol name
|
||||
remove_section_expansions(self, section, other.sections[section].expansions.content)
|
||||
|
||||
# If it has been expanded previously but now the expansions list is empty,
|
||||
# it means adding exclusions has exhausted the list. Remove the section entirely.
|
||||
if previously_expanded and not self.sections[section].expanded.content:
|
||||
del self.sections[section]
|
||||
else:
|
||||
# A rule section can have multiple rule sections excluded from it. Get the
|
||||
# most specific rule from the list, and if an even more specific rule is found,
|
||||
# replace it entirely. Otherwise, keep appending.
|
||||
exclusions = self.sections[section].excludes
|
||||
exclusions_list = exclusions.content if exclusions.content is not None else []
|
||||
exclusions_to_remove = filter(lambda r: r.is_more_specific_rule_of(other), exclusions_list)
|
||||
|
||||
remaining_exclusions = [e for e in exclusions_list if e not in exclusions_to_remove]
|
||||
remaining_exclusions.append(other)
|
||||
|
||||
self.sections[section].excludes.content = remaining_exclusions
|
||||
|
||||
def get_sections_intersection(self, other):
|
||||
return set(self.sections.keys()).intersection(set(other.sections.keys()))
|
||||
|
||||
def is_more_specific_rule_of(self, other):
|
||||
if (self.specificity <= other.specificity):
|
||||
return False
|
||||
|
||||
# Compare archive, obj and target
|
||||
for entity_index in range(1, other.specificity + 1):
|
||||
if self[entity_index] != other[entity_index] and other[entity_index] is not None:
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
def maps_same_entities_as(self, other):
|
||||
if self.specificity != other.specificity:
|
||||
return False
|
||||
|
||||
# Compare archive, obj and target
|
||||
for entity_index in range(1, other.specificity + 1):
|
||||
if self[entity_index] != other[entity_index] and other[entity_index] is not None:
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
def __getitem__(self, key):
|
||||
if key == PlacementRule.ARCHIVE_SPECIFICITY:
|
||||
return self.archive
|
||||
elif key == PlacementRule.OBJECT_SPECIFICITY:
|
||||
return self.obj
|
||||
elif key == PlacementRule.SYMBOL_SPECIFICITY:
|
||||
return self.symbol
|
||||
else:
|
||||
return None
|
||||
|
||||
def __str__(self):
|
||||
sorted_sections = sorted(self.get_section_names())
|
||||
|
||||
sections_string = list()
|
||||
|
||||
for section in sorted_sections:
|
||||
exclusions = self.sections[section].excludes.content
|
||||
|
||||
exclusion_string = None
|
||||
|
||||
if exclusions:
|
||||
exclusion_string = " ".join(map(lambda e: "*" + e.archive + (":" + e.obj + ".*" if e.obj else ""), exclusions))
|
||||
exclusion_string = "EXCLUDE_FILE(" + exclusion_string + ")"
|
||||
else:
|
||||
exclusion_string = ""
|
||||
|
||||
section_string = None
|
||||
exclusion_section_string = None
|
||||
|
||||
section_expansions = self.sections[section].expansions.content
|
||||
section_expanded = self.sections[section].expanded.content
|
||||
|
||||
if section_expansions and section_expanded:
|
||||
section_string = " ".join(section_expansions)
|
||||
exclusion_section_string = section_string
|
||||
else:
|
||||
section_string = section
|
||||
exclusion_section_string = exclusion_string + " " + section_string
|
||||
|
||||
sections_string.append(exclusion_section_string)
|
||||
|
||||
sections_string = " ".join(sections_string)
|
||||
|
||||
archive = str(self.archive) if self.archive else ""
|
||||
obj = (str(self.obj) + (".*" if self.obj else "")) if self.obj else ""
|
||||
|
||||
# Handle output string generation based on information available
|
||||
if self.specificity == PlacementRule.DEFAULT_SPECIFICITY:
|
||||
rule_string = "*(%s)" % (sections_string)
|
||||
elif self.specificity == PlacementRule.ARCHIVE_SPECIFICITY:
|
||||
rule_string = "*%s:(%s)" % (archive, sections_string)
|
||||
else:
|
||||
rule_string = "*%s:%s(%s)" % (archive, obj, sections_string)
|
||||
|
||||
return rule_string
|
||||
|
||||
def __eq__(self, other):
|
||||
if id(self) == id(other):
|
||||
return True
|
||||
|
||||
def exclusions_set(exclusions):
|
||||
exclusions_set = {(e.archive, e.obj, e.symbol, e.target) for e in exclusions}
|
||||
return exclusions_set
|
||||
|
||||
if self.archive != other.archive:
|
||||
return False
|
||||
|
||||
if self.obj != other.obj:
|
||||
return False
|
||||
|
||||
if self.symbol != other.symbol:
|
||||
return False
|
||||
|
||||
if set(self.sections.keys()) != set(other.sections.keys()):
|
||||
return False
|
||||
|
||||
for (section, metadata) in self.sections.items():
|
||||
|
||||
self_meta = metadata
|
||||
other_meta = other.sections[section]
|
||||
|
||||
if exclusions_set(self_meta.excludes.content) != exclusions_set(other_meta.excludes.content):
|
||||
return False
|
||||
|
||||
if set(self_meta.expansions.content) != set(other_meta.expansions.content):
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self.__eq__(other)
|
||||
|
||||
def __iter__(self):
|
||||
yield self.archive
|
||||
yield self.obj
|
||||
yield self.symbol
|
||||
raise StopIteration
|
||||
|
||||
|
||||
class GenerationModel:
|
||||
"""
|
||||
Implements generation of placement rules based on collected sections, scheme and mapping fragment.
|
||||
"""
|
||||
|
||||
DEFAULT_SCHEME = "default"
|
||||
|
||||
def __init__(self):
|
||||
self.schemes = {}
|
||||
self.sections = {}
|
||||
self.mappings = {}
|
||||
|
||||
def _add_mapping_rules(self, archive, obj, symbol, scheme_name, scheme_dict, rules):
|
||||
# Use an ordinary dictionary to raise exception on non-existing keys
|
||||
temp_dict = dict(scheme_dict)
|
||||
|
||||
sections_bucket = temp_dict[scheme_name]
|
||||
|
||||
for (target, sections) in sections_bucket.items():
|
||||
section_entries = []
|
||||
|
||||
for section in sections:
|
||||
section_entries.extend(section.entries)
|
||||
|
||||
rule = PlacementRule(archive, obj, symbol, section_entries, target)
|
||||
|
||||
if rule not in rules:
|
||||
rules.append(rule)
|
||||
|
||||
def _build_scheme_dictionary(self):
|
||||
scheme_dictionary = collections.defaultdict(dict)
|
||||
|
||||
# Collect sections into buckets based on target name
|
||||
for scheme in self.schemes.values():
|
||||
sections_bucket = collections.defaultdict(list)
|
||||
|
||||
for (sections_name, target_name) in scheme.entries:
|
||||
# Get the sections under the bucket 'target_name'. If this bucket does not exist
|
||||
# is is created automatically
|
||||
sections_in_bucket = sections_bucket[target_name]
|
||||
|
||||
try:
|
||||
sections = self.sections[sections_name]
|
||||
except KeyError:
|
||||
message = GenerationException.UNDEFINED_REFERENCE + " to sections '" + sections + "'."
|
||||
raise GenerationException(message, scheme)
|
||||
|
||||
sections_in_bucket.append(sections)
|
||||
|
||||
scheme_dictionary[scheme.name] = sections_bucket
|
||||
|
||||
# Search for and raise exception on first instance of sections mapped to multiple targets
|
||||
for (scheme_name, sections_bucket) in scheme_dictionary.items():
|
||||
for sections_a, sections_b in itertools.combinations(sections_bucket.values(), 2):
|
||||
set_a = set()
|
||||
set_b = set()
|
||||
|
||||
for sections in sections_a:
|
||||
set_a.update(sections.entries)
|
||||
|
||||
for sections in sections_b:
|
||||
set_b.update(sections.entries)
|
||||
|
||||
intersection = set_a.intersection(set_b)
|
||||
|
||||
# If the intersection is a non-empty set, it means sections are mapped to multiple
|
||||
# targets. Raise exception.
|
||||
if intersection:
|
||||
scheme = self.schemes[scheme_name]
|
||||
message = "Sections " + str(intersection) + " mapped to multiple targets."
|
||||
raise GenerationException(message, scheme)
|
||||
|
||||
return scheme_dictionary
|
||||
|
||||
def generate_rules(self, sections_infos):
|
||||
placement_rules = collections.defaultdict(list)
|
||||
|
||||
scheme_dictionary = self._build_scheme_dictionary()
|
||||
|
||||
# Generate default rules
|
||||
default_rules = list()
|
||||
self._add_mapping_rules(None, None, None, GenerationModel.DEFAULT_SCHEME, scheme_dictionary, default_rules)
|
||||
|
||||
all_mapping_rules = collections.defaultdict(list)
|
||||
|
||||
# Generate rules based on mapping fragments
|
||||
for mapping in self.mappings.values():
|
||||
archive = mapping.archive
|
||||
mapping_rules = all_mapping_rules[archive]
|
||||
for (obj, symbol, scheme_name) in mapping.entries:
|
||||
try:
|
||||
if not (obj == Mapping.MAPPING_ALL_OBJECTS and symbol is None and
|
||||
scheme_name == GenerationModel.DEFAULT_SCHEME):
|
||||
self._add_mapping_rules(archive, obj, symbol, scheme_name, scheme_dictionary, mapping_rules)
|
||||
except KeyError:
|
||||
message = GenerationException.UNDEFINED_REFERENCE + " to scheme '" + scheme_name + "'."
|
||||
raise GenerationException(message, mapping)
|
||||
|
||||
# Detect rule conflicts
|
||||
for mapping_rules in all_mapping_rules.items():
|
||||
self._detect_conflicts(mapping_rules)
|
||||
|
||||
# Add exclusions
|
||||
for mapping_rules in all_mapping_rules.values():
|
||||
self._create_exclusions(mapping_rules, default_rules, sections_infos)
|
||||
|
||||
# Add the default rules grouped by target
|
||||
for default_rule in default_rules:
|
||||
existing_rules = placement_rules[default_rule.target]
|
||||
if default_rule.get_section_names():
|
||||
existing_rules.append(default_rule)
|
||||
|
||||
for mapping_rules in all_mapping_rules.values():
|
||||
# Add the mapping rules grouped by target
|
||||
for mapping_rule in mapping_rules:
|
||||
existing_rules = placement_rules[mapping_rule.target]
|
||||
if mapping_rule.get_section_names():
|
||||
existing_rules.append(mapping_rule)
|
||||
|
||||
return placement_rules
|
||||
|
||||
def _detect_conflicts(self, rules):
|
||||
(archive, rules_list) = rules
|
||||
|
||||
for specificity in range(0, PlacementRule.OBJECT_SPECIFICITY + 1):
|
||||
rules_with_specificity = filter(lambda r: r.specificity == specificity, rules_list)
|
||||
|
||||
for rule_a, rule_b in itertools.combinations(rules_with_specificity, 2):
|
||||
intersections = rule_a.get_sections_intersection(rule_b)
|
||||
|
||||
if intersections and rule_a.maps_same_entities_as(rule_b):
|
||||
rules_string = str([str(rule_a), str(rule_b)])
|
||||
message = "Rules " + rules_string + " map sections " + str(list(intersections)) + " into multiple targets."
|
||||
raise GenerationException(message)
|
||||
|
||||
def _create_extra_rules(self, rules):
|
||||
# This function generates extra rules for symbol specific rules. The reason for generating extra rules is to isolate,
|
||||
# as much as possible, rules that require expansion. Particularly, object specific extra rules are generated.
|
||||
rules_to_process = sorted(rules, key=lambda r: r.specificity)
|
||||
symbol_specific_rules = list(filter(lambda r: r.specificity == PlacementRule.SYMBOL_SPECIFICITY, rules_to_process))
|
||||
|
||||
extra_rules = dict()
|
||||
|
||||
for symbol_specific_rule in symbol_specific_rules:
|
||||
extra_rule_candidate = {s: None for s in symbol_specific_rule.get_section_names()}
|
||||
|
||||
super_rules = filter(lambda r: symbol_specific_rule.is_more_specific_rule_of(r), rules_to_process)
|
||||
|
||||
# Take a look at the existing rules that are more general than the current symbol-specific rule.
|
||||
# Only generate an extra rule if there is no existing object specific rule for that section
|
||||
for super_rule in super_rules:
|
||||
intersections = symbol_specific_rule.get_sections_intersection(super_rule)
|
||||
for intersection in intersections:
|
||||
if super_rule.specificity != PlacementRule.OBJECT_SPECIFICITY:
|
||||
extra_rule_candidate[intersection] = super_rule
|
||||
else:
|
||||
extra_rule_candidate[intersection] = None
|
||||
|
||||
# Generate the extra rules for the symbol specific rule section, keeping track of the generated extra rules
|
||||
for (section, section_rule) in extra_rule_candidate.items():
|
||||
if section_rule:
|
||||
extra_rule = None
|
||||
extra_rules_key = (symbol_specific_rule.archive, symbol_specific_rule.obj, section_rule.target)
|
||||
|
||||
try:
|
||||
extra_rule = extra_rules[extra_rules_key]
|
||||
|
||||
if section not in extra_rule.get_section_names():
|
||||
new_rule = PlacementRule(extra_rule.archive, extra_rule.obj, extra_rule.symbol,
|
||||
list(extra_rule.get_section_names()) + [section], extra_rule.target)
|
||||
extra_rules[extra_rules_key] = new_rule
|
||||
except KeyError:
|
||||
extra_rule = PlacementRule(symbol_specific_rule.archive, symbol_specific_rule.obj, None, [section], section_rule.target)
|
||||
extra_rules[extra_rules_key] = extra_rule
|
||||
|
||||
return extra_rules.values()
|
||||
|
||||
def _create_exclusions(self, mapping_rules, default_rules, sections_info):
    """
    Augment the rule set with the generated extra rules, then add exclusions
    to each rule for every more specific rule that claims the same input.
    """
    all_rules = list(default_rules)
    all_rules.extend(mapping_rules)

    extra_rules = self._create_extra_rules(all_rules)

    mapping_rules.extend(extra_rules)
    all_rules.extend(extra_rules)

    # Order rules from least to most specific:
    # * -> lib:* -> lib:obj -> lib:obj:symbol
    ordered = sorted(all_rules, key=lambda r: r.specificity)

    # For each rule, exclude whatever a more specific rule places elsewhere.
    # Symbol-specific rules only generate exclusions on object-specific rules.
    for general in ordered:
        for specific in reversed(ordered):
            strictly_more_specific = (specific.specificity > general.specificity and
                                      specific.specificity != PlacementRule.SYMBOL_SPECIFICITY)
            symbol_on_object = (specific.specificity == PlacementRule.SYMBOL_SPECIFICITY and
                                general.specificity == PlacementRule.OBJECT_SPECIFICITY)
            if strictly_more_specific or symbol_on_object:
                general.add_exclusion(specific, sections_info)
def add_fragments_from_file(self, fragment_file):
    """
    Merge all fragments from a parsed fragment file into this model.

    Deprecated-grammar mappings whose name is already known are merged by
    unioning their entries; any other duplicate name of the same fragment
    type raises a GenerationException.
    """
    for fragment in fragment_file.fragments:
        # Deprecated mappings with an already-known name are merged, not replaced.
        if isinstance(fragment, Mapping) and fragment.deprecated and fragment.name in self.mappings.keys():
            self.mappings[fragment.name].entries |= fragment.entries
            continue

        # Route the fragment to the dictionary for its type.
        if isinstance(fragment, Scheme):
            destination = self.schemes
        elif isinstance(fragment, Sections):
            destination = self.sections
        else:
            destination = self.mappings

        # A second fragment of the same type and name is an input error.
        if fragment.name in destination.keys():
            message = "Duplicate definition of fragment '%s' found in %s and %s." % (
                fragment.name, destination[fragment.name].path, fragment.path)
            raise GenerationException(message)

        destination[fragment.name] = fragment
class TemplateModel:
    """
    Encapsulates a linker script template file. Locates 'mapping[target]'
    marker lines and handles their replacement with generated placement
    rules to produce the final output.
    """

    Marker = collections.namedtuple("Marker", "target indent rules")

    def __init__(self, template_file):
        self.members = []
        self.file = os.path.realpath(template_file.name)

        self._generate_members(template_file)

    def _generate_members(self, template_file):
        # Marker grammar: leading space/tab indentation followed by 'mapping[target]'.
        target = Fragment.IDENTIFIER
        reference = Suppress("mapping") + Suppress("[") + target.setResultsName("target") + Suppress("]")
        pattern = White(" \t").setResultsName("indent") + reference

        # Each template line becomes either a Marker (with an initially empty
        # rules list) or a literal string copied verbatim to the output.
        for line in template_file.readlines():
            try:
                parsed = pattern.parseString(line)
                self.members.append(TemplateModel.Marker(parsed.target, parsed.indent, []))
            except ParseException:
                # Not a marker line; keep as-is.
                self.members.append(line)

    def fill(self, mapping_rules):
        # Replace each marker's rules with the rules generated for its target.
        for member in self.members:
            target = None
            try:
                target = member.target
                rules = member.rules

                del rules[:]
                rules.extend(mapping_rules[target])
            except KeyError:
                # Marker references a target no rule set was generated for.
                raise GenerationException(GenerationException.UNDEFINED_REFERENCE + " to target '" + target + "'.")
            except AttributeError:
                # Literal line; nothing to fill.
                pass

    def write(self, output_file):
        # Stamp the output as generated.
        output_file.write("/* Automatically generated file; DO NOT EDIT */\n")
        output_file.write("/* Espressif IoT Development Framework Linker Script */\n")
        output_file.write("/* Generated from: %s */\n" % self.file)
        output_file.write("\n")

        # Expand markers into their rules; copy literal lines verbatim.
        for member in self.members:
            try:
                indent = member.indent
                rules = member.rules

                for rule in rules:
                    output_file.write("".join([indent, str(rule), "\n"]))
            except AttributeError:
                output_file.write(member)
class GenerationException(LdGenFailure):
    """
    Raised when linker script generation fails: undefined references,
    failure to evaluate conditions, duplicate mappings, and similar
    problems in the input data.
    """

    UNDEFINED_REFERENCE = "Undefined reference"

    def __init__(self, message, fragment=None):
        self.fragment = fragment
        self.message = message

    def __str__(self):
        # Mention the offending fragment when one is attached.
        if not self.fragment:
            return self.message
        return "%s\nIn fragment '%s' defined in '%s'." % (self.message, self.fragment.name, self.fragment.path)
class SectionsInfo(dict):
    """
    Encapsulates 'objdump -h' output for a set of static libraries. Stores the
    raw dump per archive and lazily parses it into per-object section-name
    lists on first lookup.
    """

    __info = collections.namedtuple("__info", "filename content")

    def __init__(self):
        self.sections = dict()

    def add_sections_info(self, sections_info_dump):
        first_line = sections_info_dump.readline()

        # First dump line looks like: 'In archive /path/to/libfoo.a:'
        archive_path = (Literal("In archive").suppress() +
                        # trim the last character from archive_path, :
                        Word(printables + " ").setResultsName("archive_path").setParseAction(lambda t: t[0][:-1]) +
                        LineEnd())

        try:
            results = archive_path.parseString(first_line)
        except ParseException as p:
            raise ParseException("Parsing sections info for library " + sections_info_dump.name + " failed. " + p.message)

        # Keep the raw text; it is parsed on demand in get_obj_sections().
        archive = os.path.basename(results.archive_path)
        self.sections[archive] = SectionsInfo.__info(sections_info_dump.name, sections_info_dump.read())

    def _get_infos_from_file(self, info):
        # Object file line: '{object}: file format elf32-xtensa-le'
        object = Fragment.ENTITY.setResultsName("object") + Literal(":").suppress() + Literal("file format elf32-xtensa-le").suppress()

        # Sections table header and one table entry per section
        header = Suppress(Literal("Sections:") + Literal("Idx") + Literal("Name") + Literal("Size") + Literal("VMA") +
                          Literal("LMA") + Literal("File off") + Literal("Algn"))
        entry = Word(nums).suppress() + Fragment.ENTITY + Suppress(OneOrMore(Word(alphanums, exact=8)) +
                                                                   Word(nums + "*") + ZeroOrMore(Word(alphas.upper()) +
                                                                                                 Optional(Literal(","))))

        # The dump content is a sequence of (object line + sections table) groups.
        content = Group(object + header + Group(ZeroOrMore(entry)).setResultsName("sections"))
        parser = Group(ZeroOrMore(content)).setResultsName("contents")

        try:
            results = parser.parseString(info.content)
        except ParseException as p:
            raise ParseException("Unable to parse section info file " + info.filename + ". " + p.message)

        return results

    def get_obj_sections(self, archive, obj):
        stored = self.sections[archive]

        # Lazily convert the raw dump into an {object: [section names]} dict.
        if not isinstance(stored, dict):
            parsed = self._get_infos_from_file(stored)
            stored = dict()
            for content in parsed.contents:
                stored[content.object] = list(map(lambda s: s, content.sections))
            self.sections[archive] = stored

        # Object names in the dump carry a '.o' or '.c.obj' suffix.
        for obj_key in stored.keys():
            if obj_key == obj + ".o" or obj_key == obj + ".c.obj":
                return stored[obj_key]
150
tools/ldgen/ldgen.py
Executable file
150
tools/ldgen/ldgen.py
Executable file
@ -0,0 +1,150 @@
|
||||
#!/usr/bin/env python
|
||||
#
|
||||
# Copyright 2018-2019 Espressif Systems (Shanghai) PTE LTD
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import sys
|
||||
import tempfile
|
||||
import subprocess
|
||||
import os
|
||||
import errno
|
||||
|
||||
from fragments import FragmentFile
|
||||
from sdkconfig import SDKConfig
|
||||
from generation import GenerationModel, TemplateModel, SectionsInfo
|
||||
from ldgen_common import LdGenFailure
|
||||
from pyparsing import ParseException, ParseFatalException
|
||||
from io import StringIO
|
||||
|
||||
|
||||
def _update_environment(args):
|
||||
env = [(name, value) for (name,value) in (e.split("=",1) for e in args.env)]
|
||||
for name, value in env:
|
||||
value = " ".join(value.split())
|
||||
os.environ[name] = value
|
||||
|
||||
if args.env_file is not None:
|
||||
env = json.load(args.env_file)
|
||||
os.environ.update(env)
|
||||
|
||||
|
||||
def main():
    """
    Command-line entry point of the linker script generator.

    Reads the linker template, fragment files, the list of libraries in the
    build and the project configuration; generates placement rules; and writes
    the fully expanded linker script to the requested output path. Exits with
    status 1 on any LdGenFailure.
    """
    argparser = argparse.ArgumentParser(description="ESP-IDF linker script generator")

    argparser.add_argument(
        "--input", "-i",
        help="Linker template file",
        type=argparse.FileType("r"))

    argparser.add_argument(
        "--fragments", "-f",
        type=argparse.FileType("r"),
        help="Input fragment files",
        nargs="+")

    argparser.add_argument(
        "--libraries-file",
        type=argparse.FileType("r"),
        help="File that contains the list of libraries in the build")

    argparser.add_argument(
        "--output", "-o",
        help="Output linker script",
        type=str)

    argparser.add_argument(
        "--config", "-c",
        help="Project configuration")

    argparser.add_argument(
        "--kconfig", "-k",
        help="IDF Kconfig file")

    argparser.add_argument(
        "--env", "-e",
        action='append', default=[],
        help='Environment to set when evaluating the config file', metavar='NAME=VAL')

    argparser.add_argument('--env-file', type=argparse.FileType('r'),
                           help='Optional file to load environment variables from. Contents '
                                'should be a JSON object where each key/value pair is a variable.')

    argparser.add_argument(
        "--objdump",
        help="Path to toolchain objdump")

    args = argparser.parse_args()

    input_file = args.input
    fragment_files = [] if not args.fragments else args.fragments
    libraries_file = args.libraries_file
    config_file = args.config
    output_path = args.output
    kconfig_file = args.kconfig
    objdump = args.objdump

    try:
        # Collect 'objdump -h' dumps for every non-empty library path listed.
        sections_infos = SectionsInfo()
        for library in libraries_file:
            library = library.strip()
            if library:
                dump = StringIO(subprocess.check_output([objdump, "-h", library]).decode())
                dump.name = library
                sections_infos.add_sections_info(dump)

        generation_model = GenerationModel()

        _update_environment(args)  # assign args.env and args.env_file to os.environ

        sdkconfig = SDKConfig(kconfig_file, config_file)

        for fragment_file in fragment_files:
            try:
                fragment_file = FragmentFile(fragment_file, sdkconfig)
            except (ParseException, ParseFatalException) as e:
                # ParseException is raised on incorrect grammar
                # ParseFatalException is raised on correct grammar, but inconsistent contents (ex. duplicate
                # keys, key unsupported by fragment, unexpected number of values, etc.)
                raise LdGenFailure("failed to parse %s\n%s" % (fragment_file.name, str(e)))
            generation_model.add_fragments_from_file(fragment_file)

        mapping_rules = generation_model.generate_rules(sections_infos)

        script_model = TemplateModel(input_file)
        script_model.fill(mapping_rules)

        # Generate into a temporary file first so the real output file is only
        # created after generation has succeeded.
        with tempfile.TemporaryFile("w+") as output:
            script_model.write(output)
            output.seek(0)

            # BUGFIX: os.path.dirname() returns "" when the output is a bare
            # file name; os.makedirs("") would raise, so guard the empty case.
            output_dir = os.path.dirname(output_path)
            if output_dir and not os.path.exists(output_dir):
                try:
                    os.makedirs(output_dir)
                except OSError as exc:
                    # A concurrent build step may have created the directory already.
                    if exc.errno != errno.EEXIST:
                        raise

            with open(output_path, "w") as f:
                f.write(output.read())
    except LdGenFailure as e:
        print("linker script generation failed for %s\nERROR: %s" % (input_file.name, e))
        sys.exit(1)
# Run the generator when invoked as a script.
if __name__ == "__main__":
    main()
23
tools/ldgen/ldgen_common.py
Normal file
23
tools/ldgen/ldgen_common.py
Normal file
@ -0,0 +1,23 @@
|
||||
#
|
||||
# Copyright 2018-2019 Espressif Systems (Shanghai) PTE LTD
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
|
||||
|
||||
class LdGenFailure(RuntimeError):
    """
    Base class for any ldgen runtime failure caused by problems in the
    input data (as opposed to internal errors).
    """

    def __init__(self, message):
        super(LdGenFailure, self).__init__(message)
80
tools/ldgen/samples/esp32.lf
Normal file
80
tools/ldgen/samples/esp32.lf
Normal file
@ -0,0 +1,80 @@
|
||||
[sections:text]
|
||||
entries:
|
||||
.text+
|
||||
.literal+
|
||||
|
||||
[sections:data]
|
||||
entries:
|
||||
.data+
|
||||
|
||||
[sections:bss]
|
||||
entries:
|
||||
.bss+
|
||||
|
||||
[sections:common]
|
||||
entries:
|
||||
COMMON
|
||||
|
||||
[sections:rodata]
|
||||
entries:
|
||||
.rodata+
|
||||
|
||||
[sections:rtc_text]
|
||||
entries:
|
||||
.rtc.text+
|
||||
.rtc.literal
|
||||
|
||||
[sections:rtc_data]
|
||||
entries:
|
||||
.rtc.data+
|
||||
|
||||
[sections:rtc_rodata]
|
||||
entries:
|
||||
.rtc.rodata+
|
||||
|
||||
[sections:rtc_bss]
|
||||
entries:
|
||||
.rtc.bss
|
||||
|
||||
[sections:extram_bss]
|
||||
entries:
|
||||
.ext_ram.bss+
|
||||
|
||||
[sections:iram]
|
||||
entries:
|
||||
.iram1+
|
||||
|
||||
[sections:dram]
|
||||
entries:
|
||||
.dram1+
|
||||
|
||||
[scheme:default]
|
||||
entries:
|
||||
text -> flash_text
|
||||
rodata -> flash_rodata
|
||||
data -> dram0_data
|
||||
bss -> dram0_bss
|
||||
common -> dram0_bss
|
||||
iram -> iram0_text
|
||||
dram -> dram0_data
|
||||
rtc_text -> rtc_text
|
||||
rtc_data -> rtc_data
|
||||
rtc_rodata -> rtc_data
|
||||
rtc_bss -> rtc_bss
|
||||
|
||||
[scheme:rtc]
|
||||
entries:
|
||||
text -> rtc_text
|
||||
data -> rtc_data
|
||||
rodata -> rtc_data
|
||||
bss -> rtc_bss
|
||||
common -> rtc_bss
|
||||
|
||||
[scheme:noflash]
|
||||
entries:
|
||||
text -> iram0_text
|
||||
rodata -> dram0_data
|
||||
|
||||
[scheme:noflash_data]
|
||||
entries:
|
||||
rodata -> dram0_data
|
62
tools/ldgen/samples/mappings.lf
Normal file
62
tools/ldgen/samples/mappings.lf
Normal file
@ -0,0 +1,62 @@
|
||||
|
||||
[mapping:heap]
|
||||
archive: libheap.a
|
||||
entries:
|
||||
multi_heap (noflash)
|
||||
multi_heap_poisoning (noflash)
|
||||
|
||||
[mapping:soc]
|
||||
archive: libsoc.a
|
||||
entries:
|
||||
* (noflash)
|
||||
|
||||
[mapping:freertos]
|
||||
archive: libfreertos.a
|
||||
entries:
|
||||
* (noflash)
|
||||
|
||||
[mapping:esp32]
|
||||
archive: libesp32.a
|
||||
entries:
|
||||
core_dump (noflash)
|
||||
panic (noflash)
|
||||
|
||||
[mapping:app_trace]
|
||||
archive: libapp_trace.a
|
||||
entries:
|
||||
* (noflash)
|
||||
|
||||
[mapping:xtensa_debug_module]
|
||||
archive: libxtensa-debug-module.a
|
||||
entries:
|
||||
eri (noflash)
|
||||
|
||||
[mapping:phy]
|
||||
archive: libphy.a
|
||||
entries:
|
||||
* (noflash_data)
|
||||
|
||||
[mapping:rtc]
|
||||
archive: librtc.a
|
||||
entries:
|
||||
* (noflash)
|
||||
|
||||
[mapping:hal]
|
||||
archive: libhal.a
|
||||
entries:
|
||||
* (noflash)
|
||||
|
||||
[mapping:gcc]
|
||||
archive: libgcc.a
|
||||
entries:
|
||||
lib2funcs (noflash)
|
||||
|
||||
[mapping:spi_flash]
|
||||
archive: libspi_flash.a
|
||||
entries:
|
||||
spi_flash_rom_patch (noflash)
|
||||
|
||||
[mapping:gcov]
|
||||
archive: libgcov.a
|
||||
entries:
|
||||
* (noflash)
|
542
tools/ldgen/samples/sdkconfig
Normal file
542
tools/ldgen/samples/sdkconfig
Normal file
@ -0,0 +1,542 @@
|
||||
#
|
||||
# SDK tool configuration
|
||||
#
|
||||
CONFIG_SDK_TOOLPREFIX="xtensa-esp32-elf-"
|
||||
CONFIG_SDK_PYTHON="python"
|
||||
CONFIG_SDK_MAKE_WARN_UNDEFINED_VARIABLES=y
|
||||
|
||||
#
|
||||
# Bootloader config
|
||||
#
|
||||
CONFIG_BOOTLOADER_LOG_LEVEL_NONE=
|
||||
CONFIG_BOOTLOADER_LOG_LEVEL_ERROR=
|
||||
CONFIG_BOOTLOADER_LOG_LEVEL_WARN=y
|
||||
CONFIG_BOOTLOADER_LOG_LEVEL_INFO=
|
||||
CONFIG_BOOTLOADER_LOG_LEVEL_DEBUG=
|
||||
CONFIG_BOOTLOADER_LOG_LEVEL_VERBOSE=
|
||||
CONFIG_BOOTLOADER_LOG_LEVEL=2
|
||||
CONFIG_BOOTLOADER_VDDSDIO_BOOST_1_8V=
|
||||
CONFIG_BOOTLOADER_VDDSDIO_BOOST_1_9V=y
|
||||
|
||||
#
|
||||
# Security features
|
||||
#
|
||||
CONFIG_SECURE_BOOT_ENABLED=
|
||||
CONFIG_SECURE_FLASH_ENC_ENABLED=
|
||||
|
||||
#
|
||||
# Serial flasher config
|
||||
#
|
||||
CONFIG_ESPTOOLPY_PORT="/dev/ttyUSB0"
|
||||
CONFIG_ESPTOOLPY_BAUD_115200B=y
|
||||
CONFIG_ESPTOOLPY_BAUD_230400B=
|
||||
CONFIG_ESPTOOLPY_BAUD_921600B=
|
||||
CONFIG_ESPTOOLPY_BAUD_2MB=
|
||||
CONFIG_ESPTOOLPY_BAUD_OTHER=
|
||||
CONFIG_ESPTOOLPY_BAUD_OTHER_VAL=115200
|
||||
CONFIG_ESPTOOLPY_BAUD=115200
|
||||
CONFIG_ESPTOOLPY_COMPRESSED=
|
||||
CONFIG_ESPTOOLPY_FLASHMODE_QIO=
|
||||
CONFIG_ESPTOOLPY_FLASHMODE_QOUT=
|
||||
CONFIG_ESPTOOLPY_FLASHMODE_DIO=y
|
||||
CONFIG_ESPTOOLPY_FLASHMODE_DOUT=
|
||||
CONFIG_ESPTOOLPY_FLASHMODE="dio"
|
||||
CONFIG_ESPTOOLPY_FLASHFREQ_80M=
|
||||
CONFIG_ESPTOOLPY_FLASHFREQ_40M=y
|
||||
CONFIG_ESPTOOLPY_FLASHFREQ_26M=
|
||||
CONFIG_ESPTOOLPY_FLASHFREQ_20M=
|
||||
CONFIG_ESPTOOLPY_FLASHFREQ="40m"
|
||||
CONFIG_ESPTOOLPY_FLASHSIZE_1MB=
|
||||
CONFIG_ESPTOOLPY_FLASHSIZE_2MB=y
|
||||
CONFIG_ESPTOOLPY_FLASHSIZE_4MB=
|
||||
CONFIG_ESPTOOLPY_FLASHSIZE_8MB=
|
||||
CONFIG_ESPTOOLPY_FLASHSIZE_16MB=
|
||||
CONFIG_ESPTOOLPY_FLASHSIZE="2MB"
|
||||
CONFIG_ESPTOOLPY_FLASHSIZE_DETECT=y
|
||||
CONFIG_ESPTOOLPY_BEFORE_RESET=y
|
||||
CONFIG_ESPTOOLPY_BEFORE_NORESET=
|
||||
CONFIG_ESPTOOLPY_BEFORE="default_reset"
|
||||
CONFIG_ESPTOOLPY_AFTER_RESET=y
|
||||
CONFIG_ESPTOOLPY_AFTER_NORESET=
|
||||
CONFIG_ESPTOOLPY_AFTER="hard_reset"
|
||||
CONFIG_ESPTOOLPY_MONITOR_BAUD_9600B=
|
||||
CONFIG_ESPTOOLPY_MONITOR_BAUD_57600B=
|
||||
CONFIG_ESPTOOLPY_MONITOR_BAUD_115200B=y
|
||||
CONFIG_ESPTOOLPY_MONITOR_BAUD_230400B=
|
||||
CONFIG_ESPTOOLPY_MONITOR_BAUD_921600B=
|
||||
CONFIG_ESPTOOLPY_MONITOR_BAUD_2MB=
|
||||
CONFIG_ESPTOOLPY_MONITOR_BAUD_OTHER=
|
||||
CONFIG_ESPTOOLPY_MONITOR_BAUD_OTHER_VAL=115200
|
||||
CONFIG_ESPTOOLPY_MONITOR_BAUD=115200
|
||||
|
||||
#
|
||||
# Partition Table
|
||||
#
|
||||
CONFIG_PARTITION_TABLE_SINGLE_APP=y
|
||||
CONFIG_PARTITION_TABLE_TWO_OTA=
|
||||
CONFIG_PARTITION_TABLE_CUSTOM=
|
||||
CONFIG_PARTITION_TABLE_CUSTOM_FILENAME="partitions.csv"
|
||||
CONFIG_PARTITION_TABLE_CUSTOM_APP_BIN_OFFSET=0x10000
|
||||
CONFIG_PARTITION_TABLE_FILENAME="partitions_singleapp.csv"
|
||||
CONFIG_APP_OFFSET=0x10000
|
||||
CONFIG_PARTITION_TABLE_MD5=y
|
||||
|
||||
#
|
||||
# Compiler options
|
||||
#
|
||||
CONFIG_COMPILER_OPTIMIZATION_LEVEL_DEBUG=y
|
||||
CONFIG_COMPILER_OPTIMIZATION_LEVEL_RELEASE=
|
||||
CONFIG_COMPILER_OPTIMIZATION_ASSERTIONS_ENABLE=y
|
||||
CONFIG_COMPILER_OPTIMIZATION_ASSERTIONS_SILENT=
|
||||
CONFIG_COMPILER_OPTIMIZATION_ASSERTIONS_DISABLE=
|
||||
CONFIG_COMPILER_CXX_EXCEPTIONS=
|
||||
CONFIG_COMPILER_STACK_CHECK_MODE_NONE=y
|
||||
CONFIG_COMPILER_STACK_CHECK_MODE_NORM=
|
||||
CONFIG_COMPILER_STACK_CHECK_MODE_STRONG=
|
||||
CONFIG_COMPILER_STACK_CHECK_MODE_ALL=
|
||||
CONFIG_COMPILER_STACK_CHECK=
|
||||
CONFIG_COMPILER_WARN_WRITE_STRINGS=
|
||||
|
||||
#
|
||||
# Component config
|
||||
#
|
||||
|
||||
#
|
||||
# Application Level Tracing
|
||||
#
|
||||
CONFIG_ESP32_APPTRACE_DEST_TRAX=
|
||||
CONFIG_ESP32_APPTRACE_DEST_NONE=y
|
||||
CONFIG_ESP32_APPTRACE_ENABLE=
|
||||
CONFIG_ESP32_APPTRACE_LOCK_ENABLE=y
|
||||
|
||||
#
|
||||
# FreeRTOS SystemView Tracing
|
||||
#
|
||||
CONFIG_AWS_IOT_SDK=
|
||||
|
||||
#
|
||||
# Bluetooth
|
||||
#
|
||||
CONFIG_BT_ENABLED=
|
||||
CONFIG_BTDM_CTRL_PINNED_TO_CORE=0
|
||||
CONFIG_BT_RESERVE_DRAM=0
|
||||
|
||||
#
|
||||
# ADC configuration
|
||||
#
|
||||
CONFIG_ADC_FORCE_XPD_FSM=
|
||||
CONFIG_ADC_DISABLE_DAC=y
|
||||
|
||||
#
|
||||
# ESP32-specific
|
||||
#
|
||||
CONFIG_ESP32_DEFAULT_CPU_FREQ_80=
|
||||
CONFIG_ESP32_DEFAULT_CPU_FREQ_160=
|
||||
CONFIG_ESP32_DEFAULT_CPU_FREQ_240=y
|
||||
CONFIG_ESP32_DEFAULT_CPU_FREQ_MHZ=240
|
||||
CONFIG_ESP32_SPIRAM_SUPPORT=
|
||||
CONFIG_ESP32_MEMMAP_TRACEMEM=
|
||||
CONFIG_ESP32_MEMMAP_TRACEMEM_TWOBANKS=
|
||||
CONFIG_ESP32_TRAX=
|
||||
CONFIG_ESP32_TRACEMEM_RESERVE_DRAM=0x0
|
||||
CONFIG_ESP32_ENABLE_COREDUMP_TO_FLASH=
|
||||
CONFIG_ESP32_ENABLE_COREDUMP_TO_UART=
|
||||
CONFIG_ESP32_ENABLE_COREDUMP_TO_NONE=y
|
||||
CONFIG_ESP32_ENABLE_COREDUMP=
|
||||
CONFIG_ESP32_UNIVERSAL_MAC_ADDRESSES_TWO=
|
||||
CONFIG_ESP32_UNIVERSAL_MAC_ADDRESSES_FOUR=y
|
||||
CONFIG_ESP32_UNIVERSAL_MAC_ADDRESSES=4
|
||||
CONFIG_ESP_SYSTEM_EVENT_QUEUE_SIZE=32
|
||||
CONFIG_ESP_SYSTEM_EVENT_TASK_STACK_SIZE=2048
|
||||
CONFIG_ESP_MAIN_TASK_STACK_SIZE=4096
|
||||
CONFIG_ESP_IPC_TASK_STACK_SIZE=1024
|
||||
CONFIG_ESP_TIMER_TASK_STACK_SIZE=3584
|
||||
CONFIG_NEWLIB_STDOUT_LINE_ENDING_CRLF=y
|
||||
CONFIG_NEWLIB_STDOUT_LINE_ENDING_LF=
|
||||
CONFIG_NEWLIB_STDOUT_LINE_ENDING_CR=
|
||||
CONFIG_NEWLIB_STDIN_LINE_ENDING_CRLF=
|
||||
CONFIG_NEWLIB_STDIN_LINE_ENDING_LF=
|
||||
CONFIG_NEWLIB_STDIN_LINE_ENDING_CR=y
|
||||
CONFIG_NEWLIB_NANO_FORMAT=
|
||||
CONFIG_ESP_CONSOLE_UART_DEFAULT=y
|
||||
CONFIG_ESP_CONSOLE_UART_CUSTOM=
|
||||
CONFIG_ESP_CONSOLE_UART_NONE=
|
||||
CONFIG_ESP_CONSOLE_UART_NUM=0
|
||||
CONFIG_ESP_CONSOLE_UART_BAUDRATE=115200
|
||||
CONFIG_ESP32_ULP_COPROC_ENABLED=
|
||||
CONFIG_ESP32_ULP_COPROC_RESERVE_MEM=0
|
||||
CONFIG_ESP32_PANIC_PRINT_HALT=
|
||||
CONFIG_ESP32_PANIC_PRINT_REBOOT=y
|
||||
CONFIG_ESP32_PANIC_SILENT_REBOOT=
|
||||
CONFIG_ESP32_PANIC_GDBSTUB=
|
||||
CONFIG_ESP32_DEBUG_OCDAWARE=y
|
||||
CONFIG_ESP_INT_WDT=y
|
||||
CONFIG_ESP_INT_WDT_TIMEOUT_MS=300
|
||||
CONFIG_ESP_TASK_WDT=y
|
||||
CONFIG_ESP_TASK_WDT_PANIC=
|
||||
CONFIG_ESP_TASK_WDT_TIMEOUT_S=5
|
||||
CONFIG_ESP_TASK_WDT_CHECK_IDLE_TASK_CPU0=y
|
||||
CONFIG_ESP32_BROWNOUT_DET=y
|
||||
CONFIG_ESP32_BROWNOUT_DET_LVL_SEL_0=y
|
||||
CONFIG_ESP32_BROWNOUT_DET_LVL_SEL_1=
|
||||
CONFIG_ESP32_BROWNOUT_DET_LVL_SEL_2=
|
||||
CONFIG_ESP32_BROWNOUT_DET_LVL_SEL_3=
|
||||
CONFIG_ESP32_BROWNOUT_DET_LVL_SEL_4=
|
||||
CONFIG_ESP32_BROWNOUT_DET_LVL_SEL_5=
|
||||
CONFIG_ESP32_BROWNOUT_DET_LVL_SEL_6=
|
||||
CONFIG_ESP32_BROWNOUT_DET_LVL_SEL_7=
|
||||
CONFIG_ESP32_BROWNOUT_DET_LVL=0
|
||||
CONFIG_ESP32_TIME_SYSCALL_USE_RTC_FRC1=y
|
||||
CONFIG_ESP32_TIME_SYSCALL_USE_RTC=
|
||||
CONFIG_ESP32_TIME_SYSCALL_USE_FRC1=
|
||||
CONFIG_ESP32_TIME_SYSCALL_USE_NONE=
|
||||
CONFIG_ESP32_RTC_CLK_SRC_INT_RC=y
|
||||
CONFIG_ESP32_RTC_CLK_SRC_EXT_CRYS=
|
||||
CONFIG_ESP32_RTC_CLK_CAL_CYCLES=1024
|
||||
CONFIG_ESP32_RTC_XTAL_BOOTSTRAP_CYCLES=100
|
||||
CONFIG_ESP32_DEEP_SLEEP_WAKEUP_DELAY=2000
|
||||
CONFIG_ESP32_XTAL_FREQ_40=y
|
||||
CONFIG_ESP32_XTAL_FREQ_26=
|
||||
CONFIG_ESP32_XTAL_FREQ_AUTO=
|
||||
CONFIG_ESP32_XTAL_FREQ=40
|
||||
CONFIG_ESP32_DISABLE_BASIC_ROM_CONSOLE=
|
||||
CONFIG_ESP32_NO_BLOBS=
|
||||
CONFIG_ESP_TIMER_PROFILING=
|
||||
CONFIG_ESP32_COMPATIBLE_PRE_V2_1_BOOTLOADERS=
|
||||
CONFIG_ESP_ERR_TO_NAME_LOOKUP=y
|
||||
|
||||
#
|
||||
# Wi-Fi
|
||||
#
|
||||
CONFIG_ESP32_WIFI_STATIC_RX_BUFFER_NUM=10
|
||||
CONFIG_ESP32_WIFI_DYNAMIC_RX_BUFFER_NUM=32
|
||||
CONFIG_ESP32_WIFI_STATIC_TX_BUFFER=
|
||||
CONFIG_ESP32_WIFI_DYNAMIC_TX_BUFFER=y
|
||||
CONFIG_ESP32_WIFI_TX_BUFFER_TYPE=1
|
||||
CONFIG_ESP32_WIFI_DYNAMIC_TX_BUFFER_NUM=32
|
||||
CONFIG_ESP32_WIFI_AMPDU_TX_ENABLED=y
|
||||
CONFIG_ESP32_WIFI_TX_BA_WIN=6
|
||||
CONFIG_ESP32_WIFI_AMPDU_RX_ENABLED=y
|
||||
CONFIG_ESP32_WIFI_RX_BA_WIN=6
|
||||
CONFIG_ESP32_WIFI_NVS_ENABLED=y
|
||||
|
||||
#
|
||||
# PHY
|
||||
#
|
||||
CONFIG_ESP32_PHY_CALIBRATION_AND_DATA_STORAGE=y
|
||||
CONFIG_ESP32_PHY_INIT_DATA_IN_PARTITION=
|
||||
CONFIG_ESP32_PHY_MAX_WIFI_TX_POWER=20
|
||||
CONFIG_ESP32_PHY_MAX_TX_POWER=20
|
||||
|
||||
#
|
||||
# Power Management
|
||||
#
|
||||
CONFIG_PM_ENABLE=
|
||||
|
||||
#
|
||||
# ADC-Calibration
|
||||
#
|
||||
CONFIG_ADC_CAL_EFUSE_TP_ENABLE=y
|
||||
CONFIG_ADC_CAL_EFUSE_VREF_ENABLE=y
|
||||
CONFIG_ADC_CAL_LUT_ENABLE=y
|
||||
|
||||
#
|
||||
# Ethernet
|
||||
#
|
||||
CONFIG_ETH_DMA_RX_BUF_NUM=10
|
||||
CONFIG_ETH_DMA_TX_BUF_NUM=10
|
||||
CONFIG_ETH_EMAC_L2_TO_L3_RX_BUF_MODE=
|
||||
CONFIG_ETH_EMAC_TASK_PRIORITY=20
|
||||
|
||||
#
|
||||
# FAT Filesystem support
|
||||
#
|
||||
CONFIG_FATFS_CODEPAGE_DYNAMIC=
|
||||
CONFIG_FATFS_CODEPAGE_437=y
|
||||
CONFIG_FATFS_CODEPAGE_720=
|
||||
CONFIG_FATFS_CODEPAGE_737=
|
||||
CONFIG_FATFS_CODEPAGE_771=
|
||||
CONFIG_FATFS_CODEPAGE_775=
|
||||
CONFIG_FATFS_CODEPAGE_850=
|
||||
CONFIG_FATFS_CODEPAGE_852=
|
||||
CONFIG_FATFS_CODEPAGE_855=
|
||||
CONFIG_FATFS_CODEPAGE_857=
|
||||
CONFIG_FATFS_CODEPAGE_860=
|
||||
CONFIG_FATFS_CODEPAGE_861=
|
||||
CONFIG_FATFS_CODEPAGE_862=
|
||||
CONFIG_FATFS_CODEPAGE_863=
|
||||
CONFIG_FATFS_CODEPAGE_864=
|
||||
CONFIG_FATFS_CODEPAGE_865=
|
||||
CONFIG_FATFS_CODEPAGE_866=
|
||||
CONFIG_FATFS_CODEPAGE_869=
|
||||
CONFIG_FATFS_CODEPAGE_932=
|
||||
CONFIG_FATFS_CODEPAGE_936=
|
||||
CONFIG_FATFS_CODEPAGE_949=
|
||||
CONFIG_FATFS_CODEPAGE_950=
|
||||
CONFIG_FATFS_CODEPAGE=437
|
||||
CONFIG_FATFS_LFN_NONE=y
|
||||
CONFIG_FATFS_LFN_HEAP=
|
||||
CONFIG_FATFS_LFN_STACK=
|
||||
CONFIG_FATFS_FS_LOCK=0
|
||||
CONFIG_FATFS_TIMEOUT_MS=10000
|
||||
CONFIG_FATFS_PER_FILE_CACHE=y
|
||||
|
||||
#
|
||||
# FreeRTOS
|
||||
#
|
||||
CONFIG_FREERTOS_UNICORE=y
|
||||
CONFIG_FREERTOS_CORETIMER_0=y
|
||||
CONFIG_FREERTOS_CORETIMER_1=
|
||||
CONFIG_FREERTOS_HZ=1000
|
||||
CONFIG_FREERTOS_ASSERT_ON_UNTESTED_FUNCTION=y
|
||||
CONFIG_FREERTOS_CHECK_STACKOVERFLOW_NONE=
|
||||
CONFIG_FREERTOS_CHECK_STACKOVERFLOW_PTRVAL=y
|
||||
CONFIG_FREERTOS_CHECK_STACKOVERFLOW_CANARY=
|
||||
CONFIG_FREERTOS_WATCHPOINT_END_OF_STACK=
|
||||
CONFIG_FREERTOS_INTERRUPT_BACKTRACE=y
|
||||
CONFIG_FREERTOS_THREAD_LOCAL_STORAGE_POINTERS=3
|
||||
CONFIG_FREERTOS_ASSERT_FAIL_ABORT=y
|
||||
CONFIG_FREERTOS_ASSERT_FAIL_PRINT_CONTINUE=
|
||||
CONFIG_FREERTOS_ASSERT_DISABLE=
|
||||
CONFIG_FREERTOS_IDLE_TASK_STACKSIZE=1024
|
||||
CONFIG_FREERTOS_ISR_STACKSIZE=1536
|
||||
CONFIG_FREERTOS_LEGACY_HOOKS=
|
||||
CONFIG_FREERTOS_MAX_TASK_NAME_LEN=16
|
||||
CONFIG_FREERTOS_SUPPORT_STATIC_ALLOCATION=
|
||||
CONFIG_FREERTOS_TIMER_TASK_PRIORITY=1
|
||||
CONFIG_FREERTOS_TIMER_TASK_STACK_DEPTH=2048
|
||||
CONFIG_FREERTOS_TIMER_QUEUE_LENGTH=10
|
||||
CONFIG_FREERTOS_QUEUE_REGISTRY_SIZE=0
|
||||
CONFIG_FREERTOS_USE_TRACE_FACILITY=
|
||||
CONFIG_FREERTOS_GENERATE_RUN_TIME_STATS=
|
||||
CONFIG_FREERTOS_DEBUG_INTERNALS=
|
||||
|
||||
#
|
||||
# Heap memory debugging
|
||||
#
|
||||
CONFIG_HEAP_POISONING_DISABLED=y
|
||||
CONFIG_HEAP_POISONING_LIGHT=
|
||||
CONFIG_HEAP_POISONING_COMPREHENSIVE=
|
||||
CONFIG_HEAP_TRACING=
|
||||
|
||||
#
|
||||
# libsodium
|
||||
#
|
||||
CONFIG_LIBSODIUM_USE_MBEDTLS_SHA=y
|
||||
|
||||
#
|
||||
# Log output
|
||||
#
|
||||
CONFIG_LOG_DEFAULT_LEVEL_NONE=
|
||||
CONFIG_LOG_DEFAULT_LEVEL_ERROR=
|
||||
CONFIG_LOG_DEFAULT_LEVEL_WARN=
|
||||
CONFIG_LOG_DEFAULT_LEVEL_INFO=y
|
||||
CONFIG_LOG_DEFAULT_LEVEL_DEBUG=
|
||||
CONFIG_LOG_DEFAULT_LEVEL_VERBOSE=
|
||||
CONFIG_LOG_DEFAULT_LEVEL=3
|
||||
CONFIG_LOG_COLORS=y
|
||||
|
||||
#
|
||||
# LWIP
|
||||
#
|
||||
CONFIG_LWIP_L2_TO_L3_COPY=
|
||||
CONFIG_LWIP_IRAM_OPTIMIZATION=
|
||||
CONFIG_LWIP_MAX_SOCKETS=4
|
||||
CONFIG_LWIP_SO_REUSE=
|
||||
CONFIG_LWIP_SO_RCVBUF=
|
||||
CONFIG_LWIP_DHCP_MAX_NTP_SERVERS=1
|
||||
CONFIG_LWIP_IP_FRAG=
|
||||
CONFIG_LWIP_IP_REASSEMBLY=
|
||||
CONFIG_LWIP_STATS=
|
||||
CONFIG_LWIP_ETHARP_TRUST_IP_MAC=y
|
||||
CONFIG_LWIP_TCPIP_RECVMBOX_SIZE=32
|
||||
CONFIG_LWIP_DHCP_DOES_ARP_CHECK=y
|
||||
|
||||
#
|
||||
# DHCP server
|
||||
#
|
||||
CONFIG_LWIP_DHCPS_LEASE_UNIT=60
|
||||
CONFIG_LWIP_DHCPS_MAX_STATION_NUM=8
|
||||
CONFIG_LWIP_AUTOIP=
|
||||
CONFIG_LWIP_NETIF_LOOPBACK=y
|
||||
CONFIG_LWIP_LOOPBACK_MAX_PBUFS=8
|
||||
|
||||
#
|
||||
# TCP
|
||||
#
|
||||
CONFIG_LWIP_MAX_ACTIVE_TCP=16
|
||||
CONFIG_LWIP_MAX_LISTENING_TCP=16
|
||||
CONFIG_LWIP_TCP_MAXRTX=12
|
||||
CONFIG_LWIP_TCP_SYNMAXRTX=6
|
||||
CONFIG_LWIP_TCP_MSS=1436
|
||||
CONFIG_LWIP_TCP_MSL=60000
|
||||
CONFIG_LWIP_TCP_SND_BUF_DEFAULT=5744
|
||||
CONFIG_LWIP_TCP_WND_DEFAULT=5744
|
||||
CONFIG_LWIP_TCP_RECVMBOX_SIZE=6
|
||||
CONFIG_LWIP_TCP_QUEUE_OOSEQ=y
|
||||
CONFIG_LWIP_TCP_OVERSIZE_MSS=y
|
||||
CONFIG_LWIP_TCP_OVERSIZE_QUARTER_MSS=
|
||||
CONFIG_LWIP_TCP_OVERSIZE_DISABLE=
|
||||
|
||||
#
|
||||
# UDP
|
||||
#
|
||||
CONFIG_LWIP_MAX_UDP_PCBS=16
|
||||
CONFIG_LWIP_UDP_RECVMBOX_SIZE=6
|
||||
CONFIG_LWIP_TCPIP_TASK_STACK_SIZE=2048
|
||||
CONFIG_LWIP_PPP_SUPPORT=
|
||||
|
||||
#
|
||||
# ICMP
|
||||
#
|
||||
CONFIG_LWIP_MULTICAST_PING=
|
||||
CONFIG_LWIP_BROADCAST_PING=
|
||||
|
||||
#
|
||||
# LWIP RAW API
|
||||
#
|
||||
CONFIG_LWIP_MAX_RAW_PCBS=16
|
||||
|
||||
#
|
||||
# mbedTLS
|
||||
#
|
||||
CONFIG_MBEDTLS_SSL_MAX_CONTENT_LEN=16384
|
||||
CONFIG_MBEDTLS_DEBUG=
|
||||
CONFIG_MBEDTLS_HARDWARE_AES=y
|
||||
CONFIG_MBEDTLS_HARDWARE_MPI=y
|
||||
CONFIG_MBEDTLS_MPI_USE_INTERRUPT=y
|
||||
CONFIG_MBEDTLS_HARDWARE_SHA=
|
||||
CONFIG_MBEDTLS_HAVE_TIME=y
|
||||
CONFIG_MBEDTLS_HAVE_TIME_DATE=
|
||||
CONFIG_MBEDTLS_TLS_SERVER_AND_CLIENT=y
|
||||
CONFIG_MBEDTLS_TLS_SERVER_ONLY=
|
||||
CONFIG_MBEDTLS_TLS_CLIENT_ONLY=
|
||||
CONFIG_MBEDTLS_TLS_DISABLED=
|
||||
CONFIG_MBEDTLS_TLS_SERVER=y
|
||||
CONFIG_MBEDTLS_TLS_CLIENT=y
|
||||
CONFIG_MBEDTLS_TLS_ENABLED=y
|
||||
|
||||
#
|
||||
# TLS Key Exchange Methods
|
||||
#
|
||||
CONFIG_MBEDTLS_PSK_MODES=
|
||||
CONFIG_MBEDTLS_KEY_EXCHANGE_RSA=y
|
||||
CONFIG_MBEDTLS_KEY_EXCHANGE_DHE_RSA=y
|
||||
CONFIG_MBEDTLS_KEY_EXCHANGE_ELLIPTIC_CURVE=y
|
||||
CONFIG_MBEDTLS_KEY_EXCHANGE_ECDHE_RSA=y
|
||||
CONFIG_MBEDTLS_KEY_EXCHANGE_ECDHE_ECDSA=y
|
||||
CONFIG_MBEDTLS_KEY_EXCHANGE_ECDH_ECDSA=y
|
||||
CONFIG_MBEDTLS_KEY_EXCHANGE_ECDH_RSA=y
|
||||
CONFIG_MBEDTLS_SSL_RENEGOTIATION=y
|
||||
CONFIG_MBEDTLS_SSL_PROTO_SSL3=
|
||||
CONFIG_MBEDTLS_SSL_PROTO_TLS1=y
|
||||
CONFIG_MBEDTLS_SSL_PROTO_TLS1_1=y
|
||||
CONFIG_MBEDTLS_SSL_PROTO_TLS1_2=y
|
||||
CONFIG_MBEDTLS_SSL_PROTO_DTLS=
|
||||
CONFIG_MBEDTLS_SSL_ALPN=y
|
||||
CONFIG_MBEDTLS_SSL_SESSION_TICKETS=y
|
||||
|
||||
#
|
||||
# Symmetric Ciphers
|
||||
#
|
||||
CONFIG_MBEDTLS_AES_C=y
|
||||
CONFIG_MBEDTLS_CAMELLIA_C=
|
||||
CONFIG_MBEDTLS_DES_C=
|
||||
CONFIG_MBEDTLS_RC4_DISABLED=y
|
||||
CONFIG_MBEDTLS_RC4_ENABLED_NO_DEFAULT=
|
||||
CONFIG_MBEDTLS_RC4_ENABLED=
|
||||
CONFIG_MBEDTLS_BLOWFISH_C=
|
||||
CONFIG_MBEDTLS_XTEA_C=
|
||||
CONFIG_MBEDTLS_CCM_C=y
|
||||
CONFIG_MBEDTLS_GCM_C=y
|
||||
CONFIG_MBEDTLS_RIPEMD160_C=
|
||||
|
||||
#
|
||||
# Certificates
|
||||
#
|
||||
CONFIG_MBEDTLS_PEM_PARSE_C=y
|
||||
CONFIG_MBEDTLS_PEM_WRITE_C=y
|
||||
CONFIG_MBEDTLS_X509_CRL_PARSE_C=y
|
||||
CONFIG_MBEDTLS_X509_CSR_PARSE_C=y
|
||||
CONFIG_MBEDTLS_ECP_C=y
|
||||
CONFIG_MBEDTLS_ECDH_C=y
|
||||
CONFIG_MBEDTLS_ECDSA_C=y
|
||||
CONFIG_MBEDTLS_ECP_DP_SECP192R1_ENABLED=y
|
||||
CONFIG_MBEDTLS_ECP_DP_SECP224R1_ENABLED=y
|
||||
CONFIG_MBEDTLS_ECP_DP_SECP256R1_ENABLED=y
|
||||
CONFIG_MBEDTLS_ECP_DP_SECP384R1_ENABLED=y
|
||||
CONFIG_MBEDTLS_ECP_DP_SECP521R1_ENABLED=y
|
||||
CONFIG_MBEDTLS_ECP_DP_SECP192K1_ENABLED=y
|
||||
CONFIG_MBEDTLS_ECP_DP_SECP224K1_ENABLED=y
|
||||
CONFIG_MBEDTLS_ECP_DP_SECP256K1_ENABLED=y
|
||||
CONFIG_MBEDTLS_ECP_DP_BP256R1_ENABLED=y
|
||||
CONFIG_MBEDTLS_ECP_DP_BP384R1_ENABLED=y
|
||||
CONFIG_MBEDTLS_ECP_DP_BP512R1_ENABLED=y
|
||||
CONFIG_MBEDTLS_ECP_DP_CURVE25519_ENABLED=y
|
||||
CONFIG_MBEDTLS_ECP_NIST_OPTIM=y
|
||||
|
||||
#
|
||||
# OpenSSL
|
||||
#
|
||||
CONFIG_OPENSSL_DEBUG=
|
||||
CONFIG_OPENSSL_ASSERT_DO_NOTHING=y
|
||||
CONFIG_OPENSSL_ASSERT_EXIT=
|
||||
|
||||
#
|
||||
# PThreads
|
||||
#
|
||||
CONFIG_ESP32_PTHREAD_TASK_PRIO_DEFAULT=5
|
||||
CONFIG_ESP32_PTHREAD_TASK_STACK_SIZE_DEFAULT=3072
|
||||
|
||||
#
|
||||
# SPI Flash driver
|
||||
#
|
||||
CONFIG_SPI_FLASH_VERIFY_WRITE=
|
||||
CONFIG_SPI_FLASH_ENABLE_COUNTERS=
|
||||
CONFIG_SPI_FLASH_ROM_DRIVER_PATCH=y
|
||||
CONFIG_SPI_FLASH_DANGEROUS_WRITE_ABORTS=y
|
||||
CONFIG_SPI_FLASH_DANGEROUS_WRITE_FAILS=
|
||||
CONFIG_SPI_FLASH_DANGEROUS_WRITE_ALLOWED=
|
||||
|
||||
#
|
||||
# SPIFFS Configuration
|
||||
#
|
||||
CONFIG_SPIFFS_MAX_PARTITIONS=3
|
||||
|
||||
#
|
||||
# SPIFFS Cache Configuration
|
||||
#
|
||||
CONFIG_SPIFFS_CACHE=y
|
||||
CONFIG_SPIFFS_CACHE_WR=y
|
||||
CONFIG_SPIFFS_CACHE_STATS=
|
||||
CONFIG_SPIFFS_PAGE_CHECK=y
|
||||
CONFIG_SPIFFS_GC_MAX_RUNS=10
|
||||
CONFIG_SPIFFS_GC_STATS=
|
||||
CONFIG_SPIFFS_PAGE_SIZE=256
|
||||
CONFIG_SPIFFS_OBJ_NAME_LEN=32
|
||||
CONFIG_SPIFFS_USE_MAGIC=y
|
||||
CONFIG_SPIFFS_USE_MAGIC_LENGTH=y
|
||||
CONFIG_SPIFFS_META_LENGTH=4
|
||||
CONFIG_SPIFFS_USE_MTIME=y
|
||||
|
||||
#
|
||||
# Debug Configuration
|
||||
#
|
||||
CONFIG_SPIFFS_DBG=
|
||||
CONFIG_SPIFFS_API_DBG=
|
||||
CONFIG_SPIFFS_GC_DBG=
|
||||
CONFIG_SPIFFS_CACHE_DBG=
|
||||
CONFIG_SPIFFS_CHECK_DBG=
|
||||
CONFIG_SPIFFS_TEST_VISUALISATION=
|
||||
|
||||
#
|
||||
# tcpip adapter
|
||||
#
|
||||
CONFIG_NETIF_IP_LOST_TIMER_INTERVAL=120
|
||||
|
||||
#
|
||||
# Wear Levelling
|
||||
#
|
||||
CONFIG_WL_SECTOR_SIZE_512=
|
||||
CONFIG_WL_SECTOR_SIZE_4096=y
|
||||
CONFIG_WL_SECTOR_SIZE=4096
|
1626
tools/ldgen/samples/sections.info
Normal file
1626
tools/ldgen/samples/sections.info
Normal file
File diff suppressed because it is too large
Load Diff
214
tools/ldgen/samples/template.ld
Normal file
214
tools/ldgen/samples/template.ld
Normal file
@ -0,0 +1,214 @@
|
||||
/* Default entry point: */
|
||||
ENTRY(call_start_cpu0);
|
||||
|
||||
SECTIONS
|
||||
{
|
||||
/* RTC fast memory holds RTC wake stub code,
|
||||
including from any source file named rtc_wake_stub*.c
|
||||
*/
|
||||
.rtc.text :
|
||||
{
|
||||
. = ALIGN(4);
|
||||
|
||||
mapping[rtc_text]
|
||||
|
||||
*rtc_wake_stub*.o(.literal .text .literal.* .text.*)
|
||||
} >rtc_iram_seg
|
||||
|
||||
/* RTC slow memory holds RTC wake stub
|
||||
data/rodata, including from any source file
|
||||
named rtc_wake_stub*.c
|
||||
*/
|
||||
.rtc.data :
|
||||
{
|
||||
_rtc_data_start = ABSOLUTE(.);
|
||||
|
||||
mapping[rtc_data]
|
||||
|
||||
*rtc_wake_stub*.o(.data .rodata .data.* .rodata.* .bss .bss.*)
|
||||
_rtc_data_end = ABSOLUTE(.);
|
||||
} > rtc_slow_seg
|
||||
|
||||
/* RTC bss, from any source file named rtc_wake_stub*.c */
|
||||
.rtc.bss (NOLOAD) :
|
||||
{
|
||||
_rtc_bss_start = ABSOLUTE(.);
|
||||
|
||||
mapping[rtc_bss]
|
||||
|
||||
*rtc_wake_stub*.o(.bss .bss.*)
|
||||
*rtc_wake_stub*.o(COMMON)
|
||||
_rtc_bss_end = ABSOLUTE(.);
|
||||
} > rtc_slow_seg
|
||||
|
||||
/* Send .iram0 code to iram */
|
||||
.iram0.vectors :
|
||||
{
|
||||
/* Vectors go to IRAM */
|
||||
_init_start = ABSOLUTE(.);
|
||||
/* Vectors according to builds/RF-2015.2-win32/esp108_v1_2_s5_512int_2/config.html */
|
||||
. = 0x0;
|
||||
KEEP(*(.WindowVectors.text));
|
||||
. = 0x180;
|
||||
KEEP(*(.Level2InterruptVector.text));
|
||||
. = 0x1c0;
|
||||
KEEP(*(.Level3InterruptVector.text));
|
||||
. = 0x200;
|
||||
KEEP(*(.Level4InterruptVector.text));
|
||||
. = 0x240;
|
||||
KEEP(*(.Level5InterruptVector.text));
|
||||
. = 0x280;
|
||||
KEEP(*(.DebugExceptionVector.text));
|
||||
. = 0x2c0;
|
||||
KEEP(*(.NMIExceptionVector.text));
|
||||
. = 0x300;
|
||||
KEEP(*(.KernelExceptionVector.text));
|
||||
. = 0x340;
|
||||
KEEP(*(.UserExceptionVector.text));
|
||||
. = 0x3C0;
|
||||
KEEP(*(.DoubleExceptionVector.text));
|
||||
. = 0x400;
|
||||
*(.*Vector.literal)
|
||||
|
||||
*(.UserEnter.literal);
|
||||
*(.UserEnter.text);
|
||||
. = ALIGN (16);
|
||||
*(.entry.text)
|
||||
*(.init.literal)
|
||||
*(.init)
|
||||
_init_end = ABSOLUTE(.);
|
||||
|
||||
/* This goes here, not at top of linker script, so addr2line finds it last,
|
||||
and uses it in preference to the first symbol in IRAM */
|
||||
_iram_start = ABSOLUTE(0);
|
||||
} > iram0_0_seg
|
||||
|
||||
.iram0.text :
|
||||
{
|
||||
/* Code marked as runnning out of IRAM */
|
||||
_iram_text_start = ABSOLUTE(.);
|
||||
|
||||
mapping[iram0_text]
|
||||
|
||||
_iram_text_end = ABSOLUTE(.);
|
||||
} > iram0_0_seg
|
||||
|
||||
.dram0.data :
|
||||
{
|
||||
_data_start = ABSOLUTE(.);
|
||||
|
||||
mapping[dram0_data]
|
||||
|
||||
*(.gnu.linkonce.d.*)
|
||||
*(.data1)
|
||||
*(.sdata)
|
||||
*(.sdata.*)
|
||||
*(.gnu.linkonce.s.*)
|
||||
*(.sdata2)
|
||||
*(.sdata2.*)
|
||||
*(.gnu.linkonce.s2.*)
|
||||
*(.jcr)
|
||||
_data_end = ABSOLUTE(.);
|
||||
. = ALIGN(4);
|
||||
} >dram0_0_seg
|
||||
|
||||
/* Shared RAM */
|
||||
.dram0.bss (NOLOAD) :
|
||||
{
|
||||
. = ALIGN (8);
|
||||
_bss_start = ABSOLUTE(.);
|
||||
|
||||
mapping[dram0_bss]
|
||||
|
||||
*(.dynsbss)
|
||||
*(.sbss)
|
||||
*(.sbss.*)
|
||||
*(.gnu.linkonce.sb.*)
|
||||
*(.scommon)
|
||||
*(.sbss2)
|
||||
*(.sbss2.*)
|
||||
*(.gnu.linkonce.sb2.*)
|
||||
*(.dynbss)
|
||||
*(.share.mem)
|
||||
*(.gnu.linkonce.b.*)
|
||||
|
||||
. = ALIGN (8);
|
||||
_bss_end = ABSOLUTE(.);
|
||||
} >dram0_0_seg
|
||||
|
||||
.flash.rodata :
|
||||
{
|
||||
_rodata_start = ABSOLUTE(.);
|
||||
|
||||
mapping[flash_rodata]
|
||||
|
||||
*(.irom1.text) /* catch stray ICACHE_RODATA_ATTR */
|
||||
*(.gnu.linkonce.r.*)
|
||||
*(.rodata1)
|
||||
__XT_EXCEPTION_TABLE_ = ABSOLUTE(.);
|
||||
*(.xt_except_table)
|
||||
*(.gcc_except_table .gcc_except_table.*)
|
||||
*(.gnu.linkonce.e.*)
|
||||
*(.gnu.version_r)
|
||||
. = (. + 3) & ~ 3;
|
||||
__eh_frame = ABSOLUTE(.);
|
||||
KEEP(*(.eh_frame))
|
||||
. = (. + 7) & ~ 3;
|
||||
/* C++ constructor and destructor tables, properly ordered: */
|
||||
__init_array_start = ABSOLUTE(.);
|
||||
KEEP (*crtbegin.o(.ctors))
|
||||
KEEP (*(EXCLUDE_FILE (*crtend.o) .ctors))
|
||||
KEEP (*(SORT(.ctors.*)))
|
||||
KEEP (*(.ctors))
|
||||
__init_array_end = ABSOLUTE(.);
|
||||
KEEP (*crtbegin.o(.dtors))
|
||||
KEEP (*(EXCLUDE_FILE (*crtend.o) .dtors))
|
||||
KEEP (*(SORT(.dtors.*)))
|
||||
KEEP (*(.dtors))
|
||||
/* C++ exception handlers table: */
|
||||
__XT_EXCEPTION_DESCS_ = ABSOLUTE(.);
|
||||
*(.xt_except_desc)
|
||||
*(.gnu.linkonce.h.*)
|
||||
__XT_EXCEPTION_DESCS_END__ = ABSOLUTE(.);
|
||||
*(.xt_except_desc_end)
|
||||
*(.dynamic)
|
||||
*(.gnu.version_d)
|
||||
_rodata_end = ABSOLUTE(.);
|
||||
/* Literals are also RO data. */
|
||||
_lit4_start = ABSOLUTE(.);
|
||||
*(*.lit4)
|
||||
*(.lit4.*)
|
||||
*(.gnu.linkonce.lit4.*)
|
||||
_lit4_end = ABSOLUTE(.);
|
||||
. = ALIGN(4);
|
||||
_thread_local_start = ABSOLUTE(.);
|
||||
*(.tdata)
|
||||
*(.tdata.*)
|
||||
*(.tbss)
|
||||
*(.tbss.*)
|
||||
_thread_local_end = ABSOLUTE(.);
|
||||
. = ALIGN(4);
|
||||
} >drom0_0_seg
|
||||
|
||||
.flash.text :
|
||||
{
|
||||
_stext = .;
|
||||
_text_start = ABSOLUTE(.);
|
||||
|
||||
mapping[flash_text]
|
||||
|
||||
*(.stub .gnu.warning .gnu.linkonce.literal.* .gnu.linkonce.t.*.literal .gnu.linkonce.t.*)
|
||||
*(.irom0.text) /* catch stray ICACHE_RODATA_ATTR */
|
||||
*(.fini.literal)
|
||||
*(.fini)
|
||||
*(.gnu.version)
|
||||
_text_end = ABSOLUTE(.);
|
||||
_etext = .;
|
||||
|
||||
/* Similar to _iram_start, this symbol goes here so it is
|
||||
resolved by addr2line in preference to the first symbol in
|
||||
the flash.text segment.
|
||||
*/
|
||||
_flash_cache_start = ABSOLUTE(0);
|
||||
} >iram0_2_seg
|
||||
}
|
81
tools/ldgen/sdkconfig.py
Normal file
81
tools/ldgen/sdkconfig.py
Normal file
@ -0,0 +1,81 @@
|
||||
#
|
||||
# Copyright 2018-2019 Espressif Systems (Shanghai) PTE LTD
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
|
||||
import os
|
||||
from pyparsing import Word, alphanums, printables, Combine, Literal, hexnums, quotedString, Optional, nums, removeQuotes, oneOf, Group, infixNotation, opAssoc
|
||||
|
||||
import sys
|
||||
try:
|
||||
import kconfiglib
|
||||
except ImportError:
|
||||
parent_dir_name = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
|
||||
kconfig_new_dir = os.path.abspath(parent_dir_name + "/kconfig_new")
|
||||
sys.path.append(kconfig_new_dir)
|
||||
import kconfiglib
|
||||
|
||||
|
||||
class SDKConfig:
|
||||
"""
|
||||
Encapsulates an sdkconfig file. Defines grammar of a configuration entry, and enables
|
||||
evaluation of logical expressions involving those entries.
|
||||
"""
|
||||
|
||||
# A configuration entry is in the form CONFIG=VALUE. Definitions of components of that grammar
|
||||
IDENTIFIER = Word(alphanums.upper() + "_")
|
||||
|
||||
HEX = Combine("0x" + Word(hexnums)).setParseAction(lambda t:int(t[0], 16))
|
||||
DECIMAL = Combine(Optional(Literal("+") | Literal("-")) + Word(nums)).setParseAction(lambda t:int(t[0]))
|
||||
LITERAL = Word(printables.replace(":", ""))
|
||||
QUOTED_LITERAL = quotedString.setParseAction(removeQuotes)
|
||||
|
||||
VALUE = HEX | DECIMAL | LITERAL | QUOTED_LITERAL
|
||||
|
||||
# Operators supported by the expression evaluation
|
||||
OPERATOR = oneOf(["=", "!=", ">", "<", "<=", ">="])
|
||||
|
||||
def __init__(self, kconfig_file, sdkconfig_file):
|
||||
self.config = kconfiglib.Kconfig(kconfig_file)
|
||||
self.config.load_config(sdkconfig_file)
|
||||
|
||||
def evaluate_expression(self, expression):
|
||||
result = self.config.eval_string(expression)
|
||||
|
||||
if result == 0: # n
|
||||
return False
|
||||
elif result == 2: # y
|
||||
return True
|
||||
else: # m
|
||||
raise Exception("unsupported config expression result")
|
||||
|
||||
@staticmethod
|
||||
def get_expression_grammar():
|
||||
identifier = SDKConfig.IDENTIFIER.setResultsName("identifier")
|
||||
operator = SDKConfig.OPERATOR.setResultsName("operator")
|
||||
value = SDKConfig.VALUE.setResultsName("value")
|
||||
|
||||
test_binary = identifier + operator + value
|
||||
test_single = identifier
|
||||
|
||||
test = test_binary | test_single
|
||||
|
||||
condition = Group(Optional("(").suppress() + test + Optional(")").suppress())
|
||||
|
||||
grammar = infixNotation(condition, [
|
||||
("!", 1, opAssoc.RIGHT),
|
||||
("&&", 2, opAssoc.LEFT),
|
||||
("||", 2, opAssoc.LEFT)])
|
||||
|
||||
return grammar
|
19
tools/ldgen/test/data/Kconfig
Normal file
19
tools/ldgen/test/data/Kconfig
Normal file
@ -0,0 +1,19 @@
|
||||
menu "Test config"
|
||||
|
||||
config PERFORMANCE_LEVEL
|
||||
int
|
||||
range 0 3
|
||||
prompt "Performance level"
|
||||
|
||||
config A
|
||||
bool
|
||||
default "y"
|
||||
|
||||
config B
|
||||
bool
|
||||
default "n"
|
||||
|
||||
config C
|
||||
bool
|
||||
default "y"
|
||||
endmenu
|
84
tools/ldgen/test/data/sample.lf
Normal file
84
tools/ldgen/test/data/sample.lf
Normal file
@ -0,0 +1,84 @@
|
||||
[sections:text]
|
||||
entries:
|
||||
.text+
|
||||
.literal+
|
||||
|
||||
[sections:data]
|
||||
entries:
|
||||
.data+
|
||||
|
||||
[sections:bss]
|
||||
entries:
|
||||
.bss+
|
||||
|
||||
[sections:common]
|
||||
entries:
|
||||
COMMON
|
||||
|
||||
[sections:rodata]
|
||||
entries:
|
||||
.rodata+
|
||||
|
||||
[sections:rtc_text]
|
||||
entries:
|
||||
.rtc.text
|
||||
.rtc.literal
|
||||
|
||||
[sections:rtc_data]
|
||||
entries:
|
||||
.rtc.data
|
||||
|
||||
[sections:rtc_rodata]
|
||||
entries:
|
||||
.rtc.rodata
|
||||
|
||||
[sections:rtc_bss]
|
||||
entries:
|
||||
.rtc.bss
|
||||
|
||||
[sections:extram_bss]
|
||||
entries:
|
||||
.exram.bss
|
||||
|
||||
[sections:iram]
|
||||
entries:
|
||||
.iram+
|
||||
|
||||
[sections:dram]
|
||||
entries:
|
||||
.dram+
|
||||
|
||||
[scheme:default]
|
||||
entries:
|
||||
text -> flash_text
|
||||
rodata -> flash_rodata
|
||||
data -> dram0_data
|
||||
bss -> dram0_bss
|
||||
common -> dram0_bss
|
||||
iram -> iram0_text
|
||||
dram -> dram0_data
|
||||
rtc_text -> rtc_text
|
||||
rtc_data -> rtc_data
|
||||
rtc_rodata -> rtc_data
|
||||
rtc_bss -> rtc_bss
|
||||
|
||||
[scheme:rtc]
|
||||
entries:
|
||||
text -> rtc_text
|
||||
data -> rtc_data
|
||||
rodata -> rtc_data
|
||||
bss -> rtc_bss
|
||||
common -> rtc_bss
|
||||
|
||||
[scheme:noflash]
|
||||
entries:
|
||||
text -> iram0_text
|
||||
rodata -> dram0_data
|
||||
|
||||
[scheme:noflash_text]
|
||||
entries:
|
||||
text -> iram0_text
|
||||
|
||||
[scheme:noflash_data]
|
||||
entries:
|
||||
rodata -> dram0_data
|
8
tools/ldgen/test/data/sdkconfig
Normal file
8
tools/ldgen/test/data/sdkconfig
Normal file
@ -0,0 +1,8 @@
|
||||
CONFIG_TEST_STRING="This Is~AString#$^#$&^(*&^#(*&^)(*@_)(#*_)(*_(*}Value"
|
||||
CONFIG_TEST_NON_STRING=y
|
||||
CONFIG_TEST_WHITESPACE= y
|
||||
CONFIG_TEST_EMPTY=
|
||||
CONFIG_TEST_POSITIVE_INT=110
|
||||
CONFIG_TEST_HEX_INT=0x8000
|
||||
CONFIG_TEST_NEGATIVE_INT=-9
|
||||
CONFIG_PERFORMANCE_LEVEL=0
|
1626
tools/ldgen/test/data/sections.info
Normal file
1626
tools/ldgen/test/data/sections.info
Normal file
File diff suppressed because it is too large
Load Diff
215
tools/ldgen/test/data/template.ld
Normal file
215
tools/ldgen/test/data/template.ld
Normal file
@ -0,0 +1,215 @@
|
||||
/* Default entry point: */
|
||||
ENTRY(call_start_cpu0);
|
||||
|
||||
SECTIONS
|
||||
{
|
||||
/* RTC fast memory holds RTC wake stub code,
|
||||
including from any source file named rtc_wake_stub*.c
|
||||
*/
|
||||
.rtc.text :
|
||||
{
|
||||
. = ALIGN(4);
|
||||
|
||||
mapping[rtc_text]
|
||||
|
||||
*rtc_wake_stub*.o(.literal .text .literal.* .text.*)
|
||||
} >rtc_iram_seg
|
||||
|
||||
/* RTC slow memory holds RTC wake stub
|
||||
data/rodata, including from any source file
|
||||
named rtc_wake_stub*.c
|
||||
*/
|
||||
.rtc.data :
|
||||
{
|
||||
_rtc_data_start = ABSOLUTE(.);
|
||||
|
||||
mapping[rtc_data]
|
||||
|
||||
*rtc_wake_stub*.o(.data .rodata .data.* .rodata.* .bss .bss.*)
|
||||
_rtc_data_end = ABSOLUTE(.);
|
||||
} > rtc_slow_seg
|
||||
|
||||
/* RTC bss, from any source file named rtc_wake_stub*.c */
|
||||
.rtc.bss (NOLOAD) :
|
||||
{
|
||||
_rtc_bss_start = ABSOLUTE(.);
|
||||
|
||||
mapping[rtc_bss]
|
||||
|
||||
*rtc_wake_stub*.o(.bss .bss.*)
|
||||
*rtc_wake_stub*.o(COMMON)
|
||||
_rtc_bss_end = ABSOLUTE(.);
|
||||
} > rtc_slow_seg
|
||||
|
||||
/* Send .iram0 code to iram */
|
||||
.iram0.vectors :
|
||||
{
|
||||
/* Vectors go to IRAM */
|
||||
_init_start = ABSOLUTE(.);
|
||||
/* Vectors according to builds/RF-2015.2-win32/esp108_v1_2_s5_512int_2/config.html */
|
||||
. = 0x0;
|
||||
KEEP(*(.WindowVectors.text));
|
||||
. = 0x180;
|
||||
KEEP(*(.Level2InterruptVector.text));
|
||||
. = 0x1c0;
|
||||
KEEP(*(.Level3InterruptVector.text));
|
||||
. = 0x200;
|
||||
KEEP(*(.Level4InterruptVector.text));
|
||||
. = 0x240;
|
||||
KEEP(*(.Level5InterruptVector.text));
|
||||
. = 0x280;
|
||||
KEEP(*(.DebugExceptionVector.text));
|
||||
. = 0x2c0;
|
||||
KEEP(*(.NMIExceptionVector.text));
|
||||
. = 0x300;
|
||||
KEEP(*(.KernelExceptionVector.text));
|
||||
. = 0x340;
|
||||
KEEP(*(.UserExceptionVector.text));
|
||||
. = 0x3C0;
|
||||
KEEP(*(.DoubleExceptionVector.text));
|
||||
. = 0x400;
|
||||
*(.*Vector.literal)
|
||||
|
||||
*(.UserEnter.literal);
|
||||
*(.UserEnter.text);
|
||||
. = ALIGN (16);
|
||||
*(.entry.text)
|
||||
*(.init.literal)
|
||||
*(.init)
|
||||
_init_end = ABSOLUTE(.);
|
||||
|
||||
/* This goes here, not at top of linker script, so addr2line finds it last,
|
||||
and uses it in preference to the first symbol in IRAM */
|
||||
_iram_start = ABSOLUTE(0);
|
||||
} > iram0_0_seg
|
||||
|
||||
.iram0.text :
|
||||
{
|
||||
/* Code marked as runnning out of IRAM */
|
||||
_iram_text_start = ABSOLUTE(.);
|
||||
|
||||
mapping[iram0_text]
|
||||
|
||||
_iram_text_end = ABSOLUTE(.);
|
||||
} > iram0_0_seg
|
||||
|
||||
.dram0.data :
|
||||
{
|
||||
_data_start = ABSOLUTE(.);
|
||||
|
||||
mapping[dram0_data]
|
||||
|
||||
*(.gnu.linkonce.d.*)
|
||||
*(.data1)
|
||||
*(.sdata)
|
||||
*(.sdata.*)
|
||||
*(.gnu.linkonce.s.*)
|
||||
*(.sdata2)
|
||||
*(.sdata2.*)
|
||||
*(.gnu.linkonce.s2.*)
|
||||
*(.jcr)
|
||||
_data_end = ABSOLUTE(.);
|
||||
. = ALIGN(4);
|
||||
} >dram0_0_seg
|
||||
|
||||
/* Shared RAM */
|
||||
.dram0.bss (NOLOAD) :
|
||||
{
|
||||
. = ALIGN (8);
|
||||
_bss_start = ABSOLUTE(.);
|
||||
|
||||
mapping[dram0_bss]
|
||||
|
||||
*(.dynsbss)
|
||||
*(.sbss)
|
||||
*(.sbss.*)
|
||||
*(.gnu.linkonce.sb.*)
|
||||
*(.scommon)
|
||||
*(.sbss2)
|
||||
*(.sbss2.*)
|
||||
*(.gnu.linkonce.sb2.*)
|
||||
*(.dynbss)
|
||||
*(.share.mem)
|
||||
*(.gnu.linkonce.b.*)
|
||||
|
||||
. = ALIGN (8);
|
||||
_bss_end = ABSOLUTE(.);
|
||||
_heap_start = ABSOLUTE(.);
|
||||
} >dram0_0_seg
|
||||
|
||||
.flash.rodata :
|
||||
{
|
||||
_rodata_start = ABSOLUTE(.);
|
||||
|
||||
mapping[flash_rodata]
|
||||
|
||||
*(.irom1.text) /* catch stray ICACHE_RODATA_ATTR */
|
||||
*(.gnu.linkonce.r.*)
|
||||
*(.rodata1)
|
||||
__XT_EXCEPTION_TABLE_ = ABSOLUTE(.);
|
||||
*(.xt_except_table)
|
||||
*(.gcc_except_table .gcc_except_table.*)
|
||||
*(.gnu.linkonce.e.*)
|
||||
*(.gnu.version_r)
|
||||
. = (. + 3) & ~ 3;
|
||||
__eh_frame = ABSOLUTE(.);
|
||||
KEEP(*(.eh_frame))
|
||||
. = (. + 7) & ~ 3;
|
||||
/* C++ constructor and destructor tables, properly ordered: */
|
||||
__init_array_start = ABSOLUTE(.);
|
||||
KEEP (*crtbegin.o(.ctors))
|
||||
KEEP (*(EXCLUDE_FILE (*crtend.o) .ctors))
|
||||
KEEP (*(SORT(.ctors.*)))
|
||||
KEEP (*(.ctors))
|
||||
__init_array_end = ABSOLUTE(.);
|
||||
KEEP (*crtbegin.o(.dtors))
|
||||
KEEP (*(EXCLUDE_FILE (*crtend.o) .dtors))
|
||||
KEEP (*(SORT(.dtors.*)))
|
||||
KEEP (*(.dtors))
|
||||
/* C++ exception handlers table: */
|
||||
__XT_EXCEPTION_DESCS_ = ABSOLUTE(.);
|
||||
*(.xt_except_desc)
|
||||
*(.gnu.linkonce.h.*)
|
||||
__XT_EXCEPTION_DESCS_END__ = ABSOLUTE(.);
|
||||
*(.xt_except_desc_end)
|
||||
*(.dynamic)
|
||||
*(.gnu.version_d)
|
||||
_rodata_end = ABSOLUTE(.);
|
||||
/* Literals are also RO data. */
|
||||
_lit4_start = ABSOLUTE(.);
|
||||
*(*.lit4)
|
||||
*(.lit4.*)
|
||||
*(.gnu.linkonce.lit4.*)
|
||||
_lit4_end = ABSOLUTE(.);
|
||||
. = ALIGN(4);
|
||||
_thread_local_start = ABSOLUTE(.);
|
||||
*(.tdata)
|
||||
*(.tdata.*)
|
||||
*(.tbss)
|
||||
*(.tbss.*)
|
||||
_thread_local_end = ABSOLUTE(.);
|
||||
. = ALIGN(4);
|
||||
} >drom0_0_seg
|
||||
|
||||
.flash.text :
|
||||
{
|
||||
_stext = .;
|
||||
_text_start = ABSOLUTE(.);
|
||||
|
||||
mapping[flash_text]
|
||||
|
||||
*(.stub .gnu.warning .gnu.linkonce.literal.* .gnu.linkonce.t.*.literal .gnu.linkonce.t.*)
|
||||
*(.irom0.text) /* catch stray ICACHE_RODATA_ATTR */
|
||||
*(.fini.literal)
|
||||
*(.fini)
|
||||
*(.gnu.version)
|
||||
_text_end = ABSOLUTE(.);
|
||||
_etext = .;
|
||||
|
||||
/* Similar to _iram_start, this symbol goes here so it is
|
||||
resolved by addr2line in preference to the first symbol in
|
||||
the flash.text segment.
|
||||
*/
|
||||
_flash_cache_start = ABSOLUTE(0);
|
||||
} >iram0_2_seg
|
||||
}
|
982
tools/ldgen/test/test_fragments.py
Executable file
982
tools/ldgen/test/test_fragments.py
Executable file
@ -0,0 +1,982 @@
|
||||
#!/usr/bin/env python
|
||||
#
|
||||
# Copyright 2018-2019 Espressif Systems (Shanghai) PTE LTD
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
import sys
|
||||
import unittest
|
||||
|
||||
from io import StringIO
|
||||
from pyparsing import Word, ParseException, ParseFatalException, alphanums
|
||||
|
||||
try:
|
||||
from fragments import FragmentFile, FRAGMENT_TYPES, Fragment, KeyGrammar
|
||||
from sdkconfig import SDKConfig
|
||||
except ImportError:
|
||||
sys.path.append('../')
|
||||
from fragments import FragmentFile, FRAGMENT_TYPES, Fragment, KeyGrammar
|
||||
from sdkconfig import SDKConfig
|
||||
|
||||
|
||||
class SampleFragment(Fragment):
|
||||
|
||||
grammars = {
|
||||
"key_1": KeyGrammar(Word(alphanums + "_").setResultsName("value"), 0, None, True),
|
||||
"key_2": KeyGrammar(Word(alphanums + "_").setResultsName("value"), 0, None, False),
|
||||
"key_3": KeyGrammar(Word(alphanums + "_").setResultsName("value"), 3, 5, False)
|
||||
}
|
||||
|
||||
def set_key_value(self, key, parse_results):
|
||||
if key == "key_1":
|
||||
self.key_1 = list()
|
||||
for result in parse_results:
|
||||
self.key_1.append(result["value"])
|
||||
elif key == "key_2":
|
||||
self.key_2 = list()
|
||||
for result in parse_results:
|
||||
self.key_2.append(result["value"])
|
||||
|
||||
def get_key_grammars(self):
|
||||
return self.__class__.grammars
|
||||
|
||||
|
||||
FRAGMENT_TYPES["test"] = SampleFragment
|
||||
|
||||
|
||||
class FragmentTest(unittest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.sdkconfig = SDKConfig("data/Kconfig", "data/sdkconfig")
|
||||
|
||||
@staticmethod
|
||||
def create_fragment_file(contents, name="test_fragment.lf"):
|
||||
f = StringIO(contents)
|
||||
f.name = name
|
||||
return f
|
||||
|
||||
def test_basic(self):
|
||||
test_fragment = self.create_fragment_file(u"""
|
||||
[test:test]
|
||||
key_1:
|
||||
value_1
|
||||
value_2 # comments should be ignored
|
||||
value_3
|
||||
# this is a comment as well
|
||||
key_2: value_a
|
||||
|
||||
# this is the last comment
|
||||
""")
|
||||
fragment_file = FragmentFile(test_fragment, self.sdkconfig)
|
||||
|
||||
self.assertEqual(len(fragment_file.fragments[0].key_1), 3)
|
||||
self.assertEqual(fragment_file.fragments[0].key_1[0], "value_1")
|
||||
self.assertEqual(fragment_file.fragments[0].key_1[1], "value_2")
|
||||
self.assertEqual(fragment_file.fragments[0].key_1[2], "value_3")
|
||||
self.assertEqual(len(fragment_file.fragments[0].key_2), 1)
|
||||
self.assertEqual(fragment_file.fragments[0].key_2[0], "value_a")
|
||||
|
||||
def test_duplicate_keys(self):
|
||||
test_fragment = self.create_fragment_file(u"""
|
||||
[test:test]
|
||||
key_1: value_1
|
||||
key_1: value_a
|
||||
""")
|
||||
with self.assertRaises(ParseFatalException):
|
||||
FragmentFile(test_fragment, self.sdkconfig)
|
||||
|
||||
def test_empty_key(self):
|
||||
test_fragment = self.create_fragment_file(u"""
|
||||
[test:test]
|
||||
key_1:
|
||||
""")
|
||||
with self.assertRaises(ParseException):
|
||||
FragmentFile(test_fragment, self.sdkconfig)
|
||||
|
||||
def test_conditional(self):
|
||||
test_fragment = self.create_fragment_file(u"""
|
||||
[test:test]
|
||||
key_1:
|
||||
value_1
|
||||
if A = y:
|
||||
value_2
|
||||
value_3
|
||||
if A = n:
|
||||
value_4
|
||||
if B = n:
|
||||
value_5
|
||||
""")
|
||||
fragment_file = FragmentFile(test_fragment, self.sdkconfig)
|
||||
self.assertEqual(fragment_file.fragments[0].key_1[0], "value_1")
|
||||
self.assertEqual(fragment_file.fragments[0].key_1[1], "value_2")
|
||||
self.assertEqual(fragment_file.fragments[0].key_1[2], "value_3")
|
||||
self.assertEqual(fragment_file.fragments[0].key_1[3], "value_5")
|
||||
|
||||
test_fragment = self.create_fragment_file(u"""
|
||||
[test:test]
|
||||
key_1:
|
||||
value_1
|
||||
if B = y:
|
||||
value_2
|
||||
elif C = y:
|
||||
value_3
|
||||
elif A = y:
|
||||
value_4
|
||||
else:
|
||||
value_5
|
||||
value_6
|
||||
""")
|
||||
|
||||
fragment_file = FragmentFile(test_fragment, self.sdkconfig)
|
||||
self.assertEqual(fragment_file.fragments[0].key_1[0], "value_1")
|
||||
self.assertEqual(fragment_file.fragments[0].key_1[1], "value_3")
|
||||
self.assertEqual(fragment_file.fragments[0].key_1[2], "value_6")
|
||||
|
||||
test_fragment = self.create_fragment_file(u"""
|
||||
[test:test]
|
||||
key_1:
|
||||
value_1
|
||||
if A = y:
|
||||
value_2
|
||||
if B = y:
|
||||
value_3
|
||||
else:
|
||||
value_4
|
||||
if C = y:
|
||||
value_5
|
||||
value_6
|
||||
value_7
|
||||
key_2:
|
||||
value_a
|
||||
if B != y:
|
||||
value_b
|
||||
""")
|
||||
|
||||
fragment_file = FragmentFile(test_fragment, self.sdkconfig)
|
||||
self.assertEqual(fragment_file.fragments[0].key_1[0], "value_1")
|
||||
self.assertEqual(fragment_file.fragments[0].key_1[1], "value_2")
|
||||
self.assertEqual(fragment_file.fragments[0].key_1[2], "value_4")
|
||||
self.assertEqual(fragment_file.fragments[0].key_1[3], "value_5")
|
||||
self.assertEqual(fragment_file.fragments[0].key_1[4], "value_6")
|
||||
self.assertEqual(fragment_file.fragments[0].key_1[5], "value_7")
|
||||
self.assertEqual(fragment_file.fragments[0].key_2[0], "value_a")
|
||||
self.assertEqual(fragment_file.fragments[0].key_2[1], "value_b")
|
||||
|
||||
test_fragment = self.create_fragment_file(u"""
|
||||
[test:test]
|
||||
key_1:
|
||||
if A = n:
|
||||
value_2
|
||||
""")
|
||||
|
||||
fragment_file = FragmentFile(test_fragment, self.sdkconfig)
|
||||
self.assertEqual(len(fragment_file.fragments[0].key_1), 0)
|
||||
|
||||
def test_empty_file(self):
|
||||
test_fragment = self.create_fragment_file(u"""
|
||||
|
||||
|
||||
|
||||
|
||||
""")
|
||||
|
||||
fragment_file = FragmentFile(test_fragment, self.sdkconfig)
|
||||
self.assertEqual(len(fragment_file.fragments), 0)
|
||||
|
||||
def test_setting_indent(self):
|
||||
test_fragment = self.create_fragment_file(u"""
|
||||
[test:test]
|
||||
key_1:
|
||||
value_1
|
||||
value_2
|
||||
value_3
|
||||
""")
|
||||
fragment_file = FragmentFile(test_fragment, self.sdkconfig)
|
||||
|
||||
self.assertEqual(len(fragment_file.fragments[0].key_1), 3)
|
||||
self.assertEqual(fragment_file.fragments[0].key_1[0], "value_1")
|
||||
self.assertEqual(fragment_file.fragments[0].key_1[1], "value_2")
|
||||
self.assertEqual(fragment_file.fragments[0].key_1[2], "value_3")
|
||||
|
||||
test_fragment = self.create_fragment_file(u"""
|
||||
[test:test]
|
||||
key_1:
|
||||
value_1
|
||||
value_2 # first element dictates indent
|
||||
value_3
|
||||
""")
|
||||
with self.assertRaises(ParseFatalException):
|
||||
FragmentFile(test_fragment, self.sdkconfig)
|
||||
|
||||
def test_values_num_limit(self):
|
||||
test_fragment = self.create_fragment_file(u"""
|
||||
[test:test]
|
||||
key_1:
|
||||
value_a
|
||||
key_3:
|
||||
value_1
|
||||
value_2
|
||||
value_3
|
||||
""")
|
||||
fragment_file = FragmentFile(test_fragment, self.sdkconfig)
|
||||
|
||||
test_fragment = self.create_fragment_file(u"""
|
||||
[test:test]
|
||||
key_1:
|
||||
value_a
|
||||
key_3:
|
||||
value_1
|
||||
value_2
|
||||
value_3
|
||||
value_4
|
||||
""")
|
||||
fragment_file = FragmentFile(test_fragment, self.sdkconfig)
|
||||
self.assertEqual(len(fragment_file.fragments), 1)
|
||||
|
||||
test_fragment = self.create_fragment_file(u"""
|
||||
[test:test]
|
||||
key_1:
|
||||
value_a
|
||||
key_3:
|
||||
value_1
|
||||
value_2
|
||||
value_3
|
||||
value_4
|
||||
value_5
|
||||
""")
|
||||
fragment_file = FragmentFile(test_fragment, self.sdkconfig)
|
||||
self.assertEqual(len(fragment_file.fragments), 1)
|
||||
|
||||
test_fragment = self.create_fragment_file(u"""
|
||||
[test:test]
|
||||
key_1:
|
||||
value_a
|
||||
key_3:
|
||||
value_1
|
||||
value_2
|
||||
""")
|
||||
|
||||
with self.assertRaises(ParseFatalException):
|
||||
FragmentFile(test_fragment, self.sdkconfig)
|
||||
|
||||
test_fragment = self.create_fragment_file(u"""
|
||||
[test:test]
|
||||
key_1:
|
||||
value_a
|
||||
key_3:
|
||||
value_1
|
||||
value_2
|
||||
value_3
|
||||
value_4
|
||||
value_5
|
||||
value_6
|
||||
""")
|
||||
|
||||
with self.assertRaises(ParseFatalException):
|
||||
FragmentFile(test_fragment, self.sdkconfig)
|
||||
|
||||
def test_unsupported_key(self):
|
||||
test_fragment = self.create_fragment_file(u"""
|
||||
[test:test]
|
||||
key_1:
|
||||
value_a
|
||||
key_4:
|
||||
value_1
|
||||
""")
|
||||
|
||||
with self.assertRaises(ParseFatalException):
|
||||
FragmentFile(test_fragment, self.sdkconfig)
|
||||
|
||||
def test_empty_fragment(self):
|
||||
test_fragment = self.create_fragment_file(u"""
|
||||
[test:test]
|
||||
""")
|
||||
|
||||
with self.assertRaises(ParseException):
|
||||
FragmentFile(test_fragment, self.sdkconfig)
|
||||
|
||||
def test_empty_conditional(self):
|
||||
test_fragment = self.create_fragment_file(u"""
|
||||
[test:test]
|
||||
key_1:
|
||||
if B = y:
|
||||
else:
|
||||
value_1
|
||||
""")
|
||||
|
||||
with self.assertRaises(ParseFatalException):
|
||||
FragmentFile(test_fragment, self.sdkconfig)
|
||||
|
||||
test_fragment = self.create_fragment_file(u"""
|
||||
[test:test]
|
||||
key_1:
|
||||
if B = y:
|
||||
value_1
|
||||
else B = y:
|
||||
""")
|
||||
|
||||
with self.assertRaises(ParseFatalException):
|
||||
FragmentFile(test_fragment, self.sdkconfig)
|
||||
|
||||
test_fragment = self.create_fragment_file(u"""
|
||||
[test:test]
|
||||
key_1:
|
||||
if B = y:
|
||||
value_1
|
||||
elif B = y:
|
||||
else:
|
||||
value_2
|
||||
""")
|
||||
|
||||
with self.assertRaises(ParseFatalException):
|
||||
FragmentFile(test_fragment, self.sdkconfig)
|
||||
|
||||
def test_out_of_order_conditional(self):
|
||||
test_fragment = self.create_fragment_file(u"""
|
||||
[test:test]
|
||||
key_1:
|
||||
elif B = y:
|
||||
value_1
|
||||
else:
|
||||
value_2
|
||||
""")
|
||||
|
||||
with self.assertRaises(ParseFatalException):
|
||||
FragmentFile(test_fragment, self.sdkconfig)
|
||||
|
||||
test_fragment = self.create_fragment_file(u"""
|
||||
[test:test]
|
||||
key_1:
|
||||
else:
|
||||
value_2
|
||||
""")
|
||||
|
||||
with self.assertRaises(ParseFatalException):
|
||||
FragmentFile(test_fragment, self.sdkconfig)
|
||||
|
||||
def test_required_keys(self):
|
||||
test_fragment = self.create_fragment_file(u"""
|
||||
[test:test]
|
||||
key_2:
|
||||
value_1
|
||||
""")
|
||||
|
||||
with self.assertRaises(ParseFatalException):
|
||||
FragmentFile(test_fragment, self.sdkconfig)
|
||||
|
||||
def test_multiple_fragments(self):
|
||||
test_fragment = self.create_fragment_file(u"""
|
||||
[test:test1]
|
||||
key_1:
|
||||
value_1
|
||||
|
||||
[test:test2]
|
||||
key_1:
|
||||
value_2
|
||||
""")
|
||||
|
||||
fragment_file = FragmentFile(test_fragment, self.sdkconfig)
|
||||
|
||||
self.assertEqual(len(fragment_file.fragments), 2)
|
||||
self.assertEqual(fragment_file.fragments[0].key_1[0], "value_1")
|
||||
self.assertEqual(fragment_file.fragments[1].key_1[0], "value_2")
|
||||
|
||||
def test_whole_conditional_fragment(self):
|
||||
test_fragment = self.create_fragment_file(u"""
|
||||
if B = y:
|
||||
[test:test1]
|
||||
key_1:
|
||||
value_1
|
||||
else:
|
||||
[test:test2]
|
||||
key_1:
|
||||
value_2
|
||||
|
||||
if A = y:
|
||||
[test:test3]
|
||||
key_1:
|
||||
value_3
|
||||
if C = y:
|
||||
value_6
|
||||
|
||||
[test:test4]
|
||||
key_1:
|
||||
value_4
|
||||
|
||||
[test:test5]
|
||||
key_1:
|
||||
value_5
|
||||
""")
|
||||
|
||||
fragment_file = FragmentFile(test_fragment, self.sdkconfig)
|
||||
self.assertEqual(len(fragment_file.fragments), 4)
|
||||
self.assertEqual(fragment_file.fragments[0].name, "test2")
|
||||
self.assertEqual(fragment_file.fragments[1].name, "test3")
|
||||
self.assertEqual(fragment_file.fragments[1].key_1[1], "value_6")
|
||||
self.assertEqual(fragment_file.fragments[2].name, "test4")
|
||||
self.assertEqual(fragment_file.fragments[3].name, "test5")
|
||||
|
||||
def test_equivalent_conditional_fragment(self):
|
||||
test_fragment1 = self.create_fragment_file(u"""
|
||||
if A = y:
|
||||
[test:test1]
|
||||
key_1:
|
||||
value_1
|
||||
else:
|
||||
[test:test2]
|
||||
key_1:
|
||||
value_2
|
||||
""")
|
||||
|
||||
fragment_file1 = FragmentFile(test_fragment1, self.sdkconfig)
|
||||
self.assertEqual(len(fragment_file1.fragments), 1)
|
||||
self.assertEqual(fragment_file1.fragments[0].key_1[0], "value_1")
|
||||
|
||||
test_fragment2 = self.create_fragment_file(u"""
|
||||
[test:test1]
|
||||
key_1:
|
||||
if A = y:
|
||||
value_1
|
||||
else:
|
||||
value_2
|
||||
""")
|
||||
|
||||
fragment_file2 = FragmentFile(test_fragment2, self.sdkconfig)
|
||||
self.assertEqual(len(fragment_file2.fragments), 1)
|
||||
self.assertEqual(fragment_file2.fragments[0].key_1[0], "value_1")
|
||||
|
||||
|
||||
class SectionsTest(FragmentTest):
    """Parsing tests for [sections:*] fragments."""

    def test_basic(self):
        # Entries of a sections fragment are collected into a set of names.
        test_fragment = self.create_fragment_file(u"""
[sections:test]
entries:
    .section1
    .section2
""")

        fragment_file = FragmentFile(test_fragment, self.sdkconfig)
        self.assertEqual(fragment_file.fragments[0].entries, {".section1", ".section2"})

    def test_duplicate_entries(self):
        # Repeated section names collapse, since entries form a set.
        test_fragment = self.create_fragment_file(u"""
[sections:test]
entries:
    .section1
    .section2
    .section3
    .section2
""")

        fragment_file = FragmentFile(test_fragment, self.sdkconfig)
        self.assertEqual(fragment_file.fragments[0].entries, {".section1", ".section2", ".section3"})

    def test_empty_entries(self):
        # An 'entries' key with no values is rejected at parse time.
        test_fragment = self.create_fragment_file(u"""
[sections:test]
entries:
""")

        with self.assertRaises(ParseException):
            FragmentFile(test_fragment, self.sdkconfig)

        # Entries emptied out by a false condition are a fatal parse error.
        test_fragment = self.create_fragment_file(u"""
[sections:test]
entries:
    if B = y:
        .section1
""")

        with self.assertRaises(ParseFatalException):
            FragmentFile(test_fragment, self.sdkconfig)
||||
class SchemeTest(FragmentTest):
    """Parsing tests for [scheme:*] fragments."""

    def test_basic(self):
        # Each 'sections -> target' entry becomes a (sections, target) pair.
        test_fragment = self.create_fragment_file(u"""
[scheme:test]
entries:
    sections1 -> target1
    sections2 -> target2
""")

        fragment_file = FragmentFile(test_fragment, self.sdkconfig)
        self.assertEqual(fragment_file.fragments[0].entries,
                         {("sections1", "target1"),
                          ("sections2", "target2")})

    def test_duplicate_entries(self):
        # Duplicate pairs collapse, since entries form a set.
        test_fragment = self.create_fragment_file(u"""
[scheme:test]
entries:
    sections1 -> target1
    sections2 -> target2
    sections2 -> target2
""")

        fragment_file = FragmentFile(test_fragment, self.sdkconfig)
        self.assertEqual(fragment_file.fragments[0].entries,
                         {("sections1", "target1"),
                          ("sections2", "target2")})

    def test_empty_entries(self):
        # An 'entries' key with no values is rejected at parse time.
        test_fragment = self.create_fragment_file(u"""
[scheme:test]
entries:
""")

        with self.assertRaises(ParseException):
            FragmentFile(test_fragment, self.sdkconfig)

        # Entries emptied out by a false condition are a fatal parse error.
        test_fragment = self.create_fragment_file(u"""
[scheme:test]
entries:
    if B = y:
        sections1 -> target1
""")

        with self.assertRaises(ParseFatalException):
            FragmentFile(test_fragment, self.sdkconfig)

    def test_improper_grammar(self):
        # Entries must use the '->' separator; anything else is a parse error.
        test_fragment = self.create_fragment_file(u"""
[scheme:test]
entries:
    sections1, target1 # improper separator
""")

        with self.assertRaises(ParseException):
            FragmentFile(test_fragment, self.sdkconfig)
||||
class MappingTest(FragmentTest):
    """Parsing tests for the new-style [mapping:*] fragments."""

    def test_basic(self):
        # Entries parse into (object, symbol, scheme) triples; a missing
        # symbol is represented as None.
        test_fragment = self.create_fragment_file(u"""
[mapping:test]
archive: lib.a
entries:
    obj:symbol (noflash)
    obj (noflash)
    obj:symbol_2 (noflash)
    obj_2 (noflash)
    * (noflash)
""")

        expected = {("obj", "symbol", "noflash"),
                    ("obj", None, "noflash"),
                    ("obj", "symbol_2", "noflash"),
                    ("obj_2", None, "noflash"),
                    ("*", None, "noflash")}

        fragment_file = FragmentFile(test_fragment, self.sdkconfig)
        self.assertEqual(expected, fragment_file.fragments[0].entries)

    def test_archive(self):
        # An empty 'archive' value is a parse error.
        test_fragment = self.create_fragment_file(u"""
[mapping:test]
archive:
entries:
    * (default)
""")

        with self.assertRaises(ParseException):
            FragmentFile(test_fragment, self.sdkconfig)

        # More than one archive is a fatal parse error.
        test_fragment = self.create_fragment_file(u"""
[mapping:test]
archive:
    lib1.a
    lib2.a
entries:
    * (default)
""")

        with self.assertRaises(ParseFatalException):
            FragmentFile(test_fragment, self.sdkconfig)

    def test_empty_entries(self):
        # Entries removed by a false condition leave an empty (but valid) set.
        test_fragment = self.create_fragment_file(u"""
[mapping:test]
archive:
    lib.a
entries:
    if B = y:
        * (noflash) # if condition is false, then no 'entries' key value
""")

        expected = set()

        fragment_file = FragmentFile(test_fragment, self.sdkconfig)
        self.assertEqual(expected, fragment_file.fragments[0].entries)

        # A literally empty 'entries' key, however, is a parse error.
        test_fragment = self.create_fragment_file(u"""
[mapping:test]
archive:
    lib.a
entries:
""")

        with self.assertRaises(ParseException):
            FragmentFile(test_fragment, self.sdkconfig)

    def test_duplicate_entries(self):
        # Duplicate triples collapse, since entries form a set.
        test_fragment = self.create_fragment_file(u"""
[mapping:test]
archive:
    lib.a
entries:
    obj:symbol (noflash)
    obj:symbol (noflash)
""")

        expected = {("obj", "symbol", "noflash")}

        fragment_file = FragmentFile(test_fragment, self.sdkconfig)
        self.assertEqual(expected, fragment_file.fragments[0].entries)

    def test_invalid_grammar(self):
        # Missing 'entries' key entirely.
        test_fragment = self.create_fragment_file(u"""
[mapping:test]
archive:
    lib.a
""")

        with self.assertRaises(ParseFatalException):
            FragmentFile(test_fragment, self.sdkconfig)

        # Missing 'archive' key entirely.
        test_fragment = self.create_fragment_file(u"""
[mapping:test]
entries:
    * (default)
""")

        with self.assertRaises(ParseFatalException):
            FragmentFile(test_fragment, self.sdkconfig)

        # Colon present but no symbol name.
        test_fragment = self.create_fragment_file(u"""
[mapping:test]
archive: lib.a
entries:
    obj: (noflash)
""")

        with self.assertRaises(ParseException):
            FragmentFile(test_fragment, self.sdkconfig)

        # Empty scheme parentheses.
        test_fragment = self.create_fragment_file(u"""
[mapping:test]
archive: lib.a
entries:
    obj: ()
""")

        with self.assertRaises(ParseException):
            FragmentFile(test_fragment, self.sdkconfig)

        # Entry with no scheme at all.
        test_fragment = self.create_fragment_file(u"""
[mapping:test]
archive: lib.a
entries:
    obj:symbol
""")

        with self.assertRaises(ParseException):
            FragmentFile(test_fragment, self.sdkconfig)

        # Scheme with no object/symbol.
        test_fragment = self.create_fragment_file(u"""
[mapping:test]
archive: lib.a
entries:
    (noflash)
""")

        with self.assertRaises(ParseException):
            FragmentFile(test_fragment, self.sdkconfig)

        # Wildcard is not allowed as a symbol.
        test_fragment = self.create_fragment_file(u"""
[mapping:test]
archive: lib.a
entries:
    obj:* (noflash)
""")

        with self.assertRaises(ParseException):
            FragmentFile(test_fragment, self.sdkconfig)

        # Symbol given without an object.
        test_fragment = self.create_fragment_file(u"""
[mapping:test]
archive: lib.a
entries:
    :symbol (noflash)
""")

        with self.assertRaises(ParseException):
            FragmentFile(test_fragment, self.sdkconfig)

        # Wildcard object cannot be combined with a symbol.
        test_fragment = self.create_fragment_file(u"""
[mapping:test]
archive: lib.a
entries:
    *:symbol (noflash)
""")

        with self.assertRaises(ParseException):
            FragmentFile(test_fragment, self.sdkconfig)
||||
class DeprecatedMappingTest(FragmentTest):
    """Parsing tests for the deprecated [mapping] fragment syntax
    (':'-prefixed condition lines instead of if/else blocks)."""

    def test_valid_grammar(self):
        test_fragment = self.create_fragment_file(u"""
[mapping]
archive: lib.a
entries:
    obj:symbol (noflash)
    # Comments should not matter
    obj (noflash)
    # Nor should whitespace
    obj : symbol_2 ( noflash )
    obj_2 ( noflash )
    * (noflash)
""")
        fragment_file = FragmentFile(test_fragment, self.sdkconfig)
        self.assertEqual("lib.a", fragment_file.fragments[0].archive)
        # The fragment name is derived from the archive name.
        self.assertEqual("lib_a", fragment_file.fragments[0].name)

        expected = {("obj", "symbol", "noflash"),
                    ("obj", None, "noflash"),
                    ("obj", "symbol_2", "noflash"),
                    ("obj_2", None, "noflash"),
                    ("*", None, "noflash")
                    }

        self.assertEqual(expected, fragment_file.fragments[0].entries)

    def test_explicit_blank_default_w_others(self):
        # Condition is false and the explicit default is blank -> '*' default.
        test_fragment = self.create_fragment_file(u"""
[mapping]
archive: lib.a
entries:
    : A = n
    obj_a (noflash)
    : default
""")
        fragment_file = FragmentFile(test_fragment, self.sdkconfig)
        expected = {("*", None, "default")}

        self.assertEqual(expected, fragment_file.fragments[0].entries)

    def test_implicit_blank_default_w_others(self):
        # No default written at all behaves like a blank explicit default.
        test_fragment = self.create_fragment_file(u"""
[mapping]
archive: lib.a
entries:
    : A = n
    obj_a (noflash)
""")

        fragment_file = FragmentFile(test_fragment, self.sdkconfig)
        expected = {("*", None, "default")}

        self.assertEqual(expected, fragment_file.fragments[0].entries)

    def test_explicit_blank_default(self):
        test_fragment = self.create_fragment_file(u"""
[mapping]
archive: lib.a
entries:
    : default
""")
        fragment_file = FragmentFile(test_fragment, self.sdkconfig)
        expected = {("*", None, "default")}

        self.assertEqual(expected, fragment_file.fragments[0].entries)

    def test_implicit_blank_default(self):
        test_fragment = self.create_fragment_file(u"""
[mapping]
archive: lib.a
entries:
    : default
""")
        fragment_file = FragmentFile(test_fragment, self.sdkconfig)
        expected = {("*", None, "default")}

        self.assertEqual(expected, fragment_file.fragments[0].entries)

    def test_multiple_entries(self):
        # Only the entries under the first true condition (B = n) are kept.
        test_fragment = self.create_fragment_file(u"""
[mapping]
archive: lib.a
entries:
    : A = n
    obj_a1 (noflash)
    obj_a2 (noflash)
    : B = n
    obj_b1 (noflash)
    obj_b2 (noflash)
    obj_b3 (noflash)
    : C = n
    obj_c1 (noflash)
""")

        fragment_file = FragmentFile(test_fragment, self.sdkconfig)
        expected = {("obj_b1", None, "noflash"),
                    ("obj_b2", None, "noflash"),
                    ("obj_b3", None, "noflash")}
        self.assertEqual(expected, fragment_file.fragments[0].entries)

    def test_blank_entries(self):
        # A true condition with no entries beneath it maps to the default.
        test_fragment = self.create_fragment_file(u"""
[mapping]
archive: lib.a
entries:
    : A = n
    obj_a (noflash)
    : B = n
    : C = n
    obj_c (noflash)
    : default
    obj (noflash)
""")
        fragment_file = FragmentFile(test_fragment, self.sdkconfig)
        expected = {("*", None, "default")}
        self.assertEqual(expected, fragment_file.fragments[0].entries)

    def test_blank_first_condition(self):
        # Entries before the first condition line are a parse error.
        test_fragment = self.create_fragment_file(u"""
[mapping]
archive: lib.a
entries:
    obj_a (noflash)
    : CONFIG_B = y
    obj_b (noflash)
""")

        with self.assertRaises(ParseException):
            FragmentFile(test_fragment, self.sdkconfig)

    def test_nonlast_default_1(self):
        # 'default' must come last; here it precedes another condition.
        test_fragment = self.create_fragment_file(u"""
[mapping]
archive: lib.a
entries:
    : default
    obj_a (noflash)
    : CONFIG_A = y
    obj_A (noflash)
""")

        with self.assertRaises(ParseException):
            FragmentFile(test_fragment, self.sdkconfig)

    def test_nonlast_default_2(self):
        test_fragment = self.create_fragment_file(u"""
[mapping]
archive: lib.a
entries:
    : A = y
    obj_A (noflash)
    : default
    obj_a (noflash)
    : B = y
    obj_B (noflash
""")

        with self.assertRaises(ParseException):
            FragmentFile(test_fragment, self.sdkconfig)

    def test_nonlast_default_3(self):
        # A bare ':' is an implicit default and must also come last.
        test_fragment = self.create_fragment_file(u"""
[mapping]
archive: lib.a
entries:
    : A = y
    obj_A (noflash)
    :
    obj_a (noflash)
    : B = y
    obj_B (noflash
""")

        with self.assertRaises(ParseException):
            FragmentFile(test_fragment, self.sdkconfig)

    def test_duplicate_default_1(self):
        # Two explicit 'default' conditions are rejected.
        test_fragment = self.create_fragment_file(u"""
[mapping]
archive: lib.a
entries:
    : CONFIG_A = y
    obj_A (noflash)
    : default
    obj_a (noflash)
    : CONFIG_B = y
    obj_B (noflash)
    : default
    obj_a (noflash)
""")

        with self.assertRaises(ParseException):
            FragmentFile(test_fragment, self.sdkconfig)

    def test_duplicate_default_2(self):
        # An explicit 'default' plus a bare ':' default is also rejected.
        test_fragment = self.create_fragment_file(u"""
[mapping]
archive: lib.a
entries:
    : CONFIG_A = y
    obj_A (noflash)
    : CONFIG_B = y
    obj_a (noflash)
    : default
    obj_B (noflash)
    :
    obj_a (noflash)
""")

        with self.assertRaises(ParseException):
            FragmentFile(test_fragment, self.sdkconfig)

    def test_mixed_deprecated_mapping(self):
        # Deprecated and new-style mappings can coexist in one file and, given
        # equivalent conditions, must parse to the same entries.
        test_fragment = self.create_fragment_file(u"""
[mapping]
archive: lib.a
entries:
    : A = n
    obj_A (noflash)
    : default
    obj_B (noflash)


[mapping:test]
archive: lib.a
entries:
    if A = n:
        obj_A (noflash)
    else:
        obj_B (noflash)
""")

        fragment_file = FragmentFile(test_fragment, self.sdkconfig)
        self.assertEqual(2, len(fragment_file.fragments))

        self.assertEqual(fragment_file.fragments[0].entries,
                         fragment_file.fragments[1].entries)
|
||||
# Allow running this test module directly from the command line.
if __name__ == "__main__":
    unittest.main()
|
1354
tools/ldgen/test/test_generation.py
Executable file
1354
tools/ldgen/test/test_generation.py
Executable file
File diff suppressed because it is too large
Load Diff
Reference in New Issue
Block a user