Mirror of https://github.com/espressif/ESP8266_RTOS_SDK.git (synced 2025-06-03 02:56:35 +08:00)
feat(make): Sync code from esp-idf and modify for ESP8266
Commit ID: f6bfe13e
@@ -16,13 +16,20 @@ endfunction()
#
function(register_component)
get_filename_component(component_dir ${CMAKE_CURRENT_LIST_FILE} DIRECTORY)
get_filename_component(component ${component_dir} NAME)
set(component ${COMPONENT_NAME})

spaces2list(COMPONENT_SRCDIRS)
spaces2list(COMPONENT_ADD_INCLUDEDIRS)
spaces2list(COMPONENT_SRCEXCLUDE)

if(COMPONENT_SRCDIRS)
# Warn user if both COMPONENT_SRCDIRS and COMPONENT_SRCS are set
if(COMPONENT_SRCS)
message(WARNING "COMPONENT_SRCDIRS and COMPONENT_SRCS are both set, COMPONENT_SRCS will be ignored")
endif()

set(COMPONENT_SRCS "")

# Add to COMPONENT_SRCS by globbing in COMPONENT_SRCDIRS
if(NOT COMPONENT_SRCS)
foreach(dir ${COMPONENT_SRCDIRS})
get_filename_component(abs_dir ${dir} ABSOLUTE BASE_DIR ${component_dir})
if(NOT IS_DIRECTORY ${abs_dir})
@@ -39,6 +46,17 @@ function(register_component)
endforeach()
endif()

# Remove COMPONENT_SRCEXCLUDE matches
foreach(exclude ${COMPONENT_SRCEXCLUDE})
get_filename_component(exclude "${exclude}" ABSOLUTE ${component_dir})
foreach(src ${COMPONENT_SRCS})
get_filename_component(abs_src "${src}" ABSOLUTE ${component_dir})
if("${exclude}" STREQUAL "${abs_src}") # compare as canonical paths
list(REMOVE_ITEM COMPONENT_SRCS "${src}")
endif()
endforeach()
endforeach()

# add as a PUBLIC library (if there are source files) or INTERFACE (if header only)
if(COMPONENT_SRCS OR embed_binaries)
add_library(${component} STATIC ${COMPONENT_SRCS})
@@ -84,6 +102,11 @@ function(register_component)
endif()
target_include_directories(${component} PRIVATE ${abs_dir})
endforeach()

if(component IN_LIST BUILD_TEST_COMPONENTS)
target_link_libraries(${component} "-L${CMAKE_CURRENT_BINARY_DIR}")
target_link_libraries(${component} "-Wl,--whole-archive -l${component} -Wl,--no-whole-archive")
endif()
endfunction()
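
[Editor's sketch, not part of the commit] A minimal, standalone illustration of the glob-and-exclude pattern used in register_component() above: COMPONENT_SRCDIRS is globbed for sources and COMPONENT_SRCEXCLUDE entries are dropped again by comparing canonicalized absolute paths. Directory and file names here are hypothetical; run with `cmake -P glob_exclude_demo.cmake`.

# glob_exclude_demo.cmake -- run with: cmake -P glob_exclude_demo.cmake
set(component_dir ${CMAKE_CURRENT_LIST_DIR})
set(COMPONENT_SRCDIRS "src" "port")        # hypothetical source directories
set(COMPONENT_SRCEXCLUDE "src/legacy.c")   # hypothetical source to drop again

set(COMPONENT_SRCS "")
foreach(dir ${COMPONENT_SRCDIRS})
    get_filename_component(abs_dir ${dir} ABSOLUTE BASE_DIR ${component_dir})
    # collect C/C++/assembly sources found in each source directory
    file(GLOB dir_srcs "${abs_dir}/*.c" "${abs_dir}/*.cpp" "${abs_dir}/*.S")
    list(APPEND COMPONENT_SRCS ${dir_srcs})
endforeach()

# remove excluded sources by comparing canonical absolute paths
foreach(exclude ${COMPONENT_SRCEXCLUDE})
    get_filename_component(exclude "${exclude}" ABSOLUTE BASE_DIR ${component_dir})
    foreach(src ${COMPONENT_SRCS})
        get_filename_component(abs_src "${src}" ABSOLUTE BASE_DIR ${component_dir})
        if("${exclude}" STREQUAL "${abs_src}")
            list(REMOVE_ITEM COMPONENT_SRCS "${src}")
        endif()
    endforeach()
endforeach()

message(STATUS "Sources after exclusion: ${COMPONENT_SRCS}")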

function(register_config_only_component)
@@ -135,7 +158,7 @@ function(components_finish_registration)

get_target_property(a_type ${a} TYPE)
if(${a_type} MATCHES .+_LIBRARY)
set(COMPONENT_LIBRARIES "${COMPONENT_LIBRARIES};${a}")
list(APPEND COMPONENT_LIBRARIES ${a})
endif()
endif()
endforeach()

@@ -42,7 +42,7 @@ def get_make_variables(path, makefile="Makefile", expected_failure=False, variab
result = {}
BUILT_IN_VARS = set(["MAKEFILE_LIST", "SHELL", "CURDIR", "MAKEFLAGS"])

for line in output.decode().split("\n"):
for line in output.decode('utf-8').split("\n"):
if line.startswith("# makefile"): # this line appears before any variable defined in the makefile itself
next_is_makefile = True
elif next_is_makefile:
@@ -82,10 +82,20 @@ def get_component_variables(project_path, component_path):
if src is not None:
srcs.append(src)
make_vars["COMPONENT_SRCS"] = " ".join(srcs)
else: # Use COMPONENT_SRCDIRS
make_vars["COMPONENT_SRCDIRS"] = make_vars.get("COMPONENT_SRCDIRS", ".")
else:
component_srcs = list()
for component_srcdir in make_vars.get("COMPONENT_SRCDIRS", ".").split(" "):
component_srcdir_path = os.path.abspath(os.path.join(component_path, component_srcdir))

srcs = list()
srcs += glob.glob(os.path.join(component_srcdir_path, "*.[cS]"))
srcs += glob.glob(os.path.join(component_srcdir_path, "*.cpp"))
srcs = [('"%s"' % str(os.path.relpath(s, component_path))) for s in srcs]

make_vars["COMPONENT_ADD_INCLUDEDIRS"] = make_vars.get("COMPONENT_ADD_INCLUDEDIRS", "include")
component_srcs += srcs
make_vars["COMPONENT_SRCS"] = " ".join(component_srcs)

make_vars["COMPONENT_ADD_INCLUDEDIRS"] = make_vars.get("COMPONENT_ADD_INCLUDEDIRS", "include")

return make_vars

@@ -106,32 +116,10 @@ def convert_project(project_path):

component_paths = project_vars["COMPONENT_PATHS"].split(" ")

# "main" component is made special in cmake, so extract it from the component_paths list
try:
main_component_path = [ p for p in component_paths if os.path.basename(p) == "main" ][0]
if debug:
print("Found main component %s" % main_component_path)
main_vars = get_component_variables(project_path, main_component_path)
except IndexError:
print("WARNING: Project has no 'main' component, but CMake-based system requires at least one file in MAIN_SRCS...")
main_vars = { "COMPONENT_SRCS" : ""} # dummy for MAIN_SRCS

# Remove main component from list of components we're converting to cmake
component_paths = [ p for p in component_paths if os.path.basename(p) != "main" ]

# Convert components as needed
for p in component_paths:
convert_component(project_path, p)

# Look up project variables before we start writing the file, so nothing
# is created if there is an error

main_srcs = main_vars["COMPONENT_SRCS"].split(" ")
# convert from component-relative to absolute paths
main_srcs = [ os.path.normpath(os.path.join(main_component_path, m)) for m in main_srcs ]
# convert to make relative to the project directory
main_srcs = [ os.path.relpath(m, project_path) for m in main_srcs ]

project_name = project_vars["PROJECT_NAME"]

# Generate the project CMakeLists.txt file
@@ -139,12 +127,11 @@ def convert_project(project_path):
f.write("""
# (Automatically converted from project Makefile by convert_to_cmake.py.)

# The following four lines of boilerplate have to be in your project's CMakeLists
# The following lines of boilerplate have to be in your project's CMakeLists
# in this exact order for cmake to work correctly
cmake_minimum_required(VERSION 3.5)

""")
f.write("set(MAIN_SRCS %s)\n" % " ".join(main_srcs))
f.write("""
include($ENV{IDF_PATH}/tools/cmake/project.cmake)
""")
@@ -164,16 +151,6 @@ def convert_component(project_path, component_path):
# Look up all the variables before we start writing the file, so it's not
# created if there's an erro
component_srcs = v.get("COMPONENT_SRCS", None)
component_srcdirs = None
if component_srcs is not None:
# see if we should be using COMPONENT_SRCS or COMPONENT_SRCDIRS, if COMPONENT_SRCS is everything in SRCDIRS
component_allsrcs = []
for d in v.get("COMPONENT_SRCDIRS", "").split(" "):
component_allsrcs += glob.glob(os.path.normpath(os.path.join(component_path, d, "*.[cS]")))
component_allsrcs += glob.glob(os.path.normpath(os.path.join(component_path, d, "*.cpp")))
abs_component_srcs = [os.path.normpath(os.path.join(component_path, p)) for p in component_srcs.split(" ")]
if set(component_allsrcs) == set(abs_component_srcs):
component_srcdirs = v.get("COMPONENT_SRCDIRS")

component_add_includedirs = v["COMPONENT_ADD_INCLUDEDIRS"]
cflags = v.get("CFLAGS", None)
@@ -185,10 +162,7 @@ def convert_component(project_path, component_path):
f.write("set(COMPONENT_REQUIRES "")\n")
f.write("set(COMPONENT_PRIV_REQUIRES "")\n\n")

if component_srcdirs is not None:
f.write("set(COMPONENT_SRCDIRS %s)\n\n" % component_srcdirs)
f.write("register_component()\n")
elif component_srcs is not None:
if component_srcs is not None:
f.write("set(COMPONENT_SRCS %s)\n\n" % component_srcs)
f.write("register_component()\n")
else:

@@ -17,7 +17,6 @@ function(crosstool_version_check expected_ctng_version)
OUTPUT_QUIET)

string(REGEX MATCH "crosstool-ng-[0-9a-g\\.-]+" ctng_version "${toolchain_stderr}")
string(REPLACE "crosstool-ng-" "" ctng_version "${ctng_version}")
# We use FIND to match version instead of STREQUAL because some toolchains are built
# with longer git hash strings than others. This will match any version which starts with
# the expected version string.
@@ -30,3 +29,21 @@ function(crosstool_version_check expected_ctng_version)
"doesn't match supported version ${expected_ctng_version}. ${ctng_version_warning}")
endif()
endfunction()

function(get_expected_ctng_version _toolchain_ver _gcc_ver)
file(STRINGS ${IDF_PATH}/tools/toolchain_versions.mk config_contents)
foreach(name_and_value ${config_contents})
# Strip spaces
string(REPLACE " " "" name_and_value ${name_and_value})
# Find variable name
string(REGEX MATCH "^[^=]+" name ${name_and_value})
# Find the value
string(REPLACE "${name}=" "" value ${name_and_value})
# Getting values
if("${name}" STREQUAL "SUPPORTED_TOOLCHAIN_COMMIT_DESC")
set("${_toolchain_ver}" "${value}" PARENT_SCOPE)
elseif("${name}" STREQUAL "SUPPORTED_TOOLCHAIN_GCC_VERSIONS")
set(${_gcc_ver} "${value}" PARENT_SCOPE)
endif()
endforeach()
endfunction()
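
[Editor's sketch, not part of the commit] The NAME=VALUE parsing used by get_expected_ctng_version() above can be tried standalone; the file name and values below are hypothetical stand-ins for toolchain_versions.mk. Run with `cmake -P parse_versions_demo.cmake`.

# parse_versions_demo.cmake -- run with: cmake -P parse_versions_demo.cmake
# write a small hypothetical versions file in the same NAME = VALUE format
file(WRITE ${CMAKE_CURRENT_LIST_DIR}/demo_versions.mk
"SUPPORTED_TOOLCHAIN_COMMIT_DESC = 1.22.0-92-g8facf4c
SUPPORTED_TOOLCHAIN_GCC_VERSIONS = 5.2.0
")

file(STRINGS ${CMAKE_CURRENT_LIST_DIR}/demo_versions.mk config_contents)
foreach(name_and_value ${config_contents})
    string(REPLACE " " "" name_and_value "${name_and_value}")   # strip spaces
    string(REGEX MATCH "^[^=]+" name "${name_and_value}")       # text before '='
    string(REPLACE "${name}=" "" value "${name_and_value}")     # text after '='
    message(STATUS "${name} -> ${value}")
endforeach()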

@@ -15,15 +15,22 @@ macro(idf_set_global_variables)

# Commmon components, required by every component in the build
#
set_default(COMPONENT_REQUIRES_COMMON "cxx esp32 newlib freertos heap log soc")
set_default(COMPONENT_REQUIRES_COMMON "esp8266 newlib freertos heap log")

# PROJECT_PATH has the path to the IDF project (top-level cmake directory)
#
# (cmake calls this CMAKE_SOURCE_DIR, keeping old name for compatibility.)
set(PROJECT_PATH "${CMAKE_SOURCE_DIR}")

# Note: Unlike older build system, "main" is no longer a component. See build docs for details.
set_default(COMPONENT_DIRS "${PROJECT_PATH}/components ${EXTRA_COMPONENT_DIRS} ${IDF_PATH}/components")
if(MAIN_SRCS)
message(WARNING "main is now a component, use of MAIN_SRCS is deprecated")
set_default(COMPONENT_DIRS "${PROJECT_PATH}/components ${EXTRA_COMPONENT_DIRS} \
${IDF_PATH}/components")
else()
set_default(COMPONENT_DIRS "${PROJECT_PATH}/components ${EXTRA_COMPONENT_DIRS} \
${IDF_PATH}/components ${PROJECT_PATH}/main")
endif()

spaces2list(COMPONENT_DIRS)

spaces2list(COMPONENTS)
@@ -33,6 +40,13 @@ macro(idf_set_global_variables)

# path to idf.py tool
set(IDFTOOL ${PYTHON} "${IDF_PATH}/tools/idf.py")

# Temporary trick to support both gcc5 and gcc8 builds
if(CMAKE_C_COMPILER_VERSION VERSION_EQUAL 5.2.0)
set(GCC_NOT_5_2_0 0)
else()
set(GCC_NOT_5_2_0 1)
endif()
endmacro()

# Add all the IDF global compiler & preprocessor options
@@ -51,14 +65,16 @@ function(idf_set_global_compiler_options)
add_compile_options(-Og)
endif()

add_c_compile_options(-std=gnu99)
# Note: the visual studio generator doesn't support this syntax
add_compile_options("$<$<COMPILE_LANGUAGE:C>:-std=gnu99>")

add_cxx_compile_options(-std=gnu++11 -fno-rtti)
add_compile_options("$<$<COMPILE_LANGUAGE:CXX>:-std=gnu++11>")
add_compile_options("$<$<COMPILE_LANGUAGE:CXX>:-fno-rtti>")

if(CONFIG_CXX_EXCEPTIONS)
add_cxx_compile_options(-fexceptions)
add_compile_options("$<$<COMPILE_LANGUAGE:CXX>:-fexceptions>")
else()
add_cxx_compile_options(-fno-exceptions)
add_compile_options("$<$<COMPILE_LANGUAGE:CXX>:-fno-exceptions>")
endif()
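
[Editor's sketch, not part of the commit] The hunk above replaces the add_c_compile_options()/add_cxx_compile_options() helpers with explicit $<COMPILE_LANGUAGE:...> generator expressions, so a flag is only applied to sources of the matching language. A minimal project showing the same idea (project and source names are hypothetical; as the diff notes, the Visual Studio generator does not support this expression):

# CMakeLists.txt -- minimal per-language flags sketch, CMake >= 3.5
cmake_minimum_required(VERSION 3.5)
project(langflags_demo C CXX)

# Applied only when compiling C sources
add_compile_options("$<$<COMPILE_LANGUAGE:C>:-std=gnu99>")
# Applied only when compiling C++ sources
add_compile_options("$<$<COMPILE_LANGUAGE:CXX>:-std=gnu++11>")
add_compile_options("$<$<COMPILE_LANGUAGE:CXX>:-fno-rtti>")

add_executable(demo main.c util.cpp)   # hypothetical sources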

# Default compiler configuration
@@ -75,10 +91,17 @@ function(idf_set_global_compiler_options)
-Wextra
-Wno-unused-parameter
-Wno-sign-compare)
add_c_compile_options(
-Wno-old-style-declaration
add_compile_options("$<$<COMPILE_LANGUAGE:C>:-Wno-old-style-declaration>")

if(CONFIG_DISABLE_GCC8_WARNINGS)
add_compile_options(
-Wno-parentheses
-Wno-sizeof-pointer-memaccess
-Wno-clobbered
)

endif()

# Stack protection
if(NOT BOOTLOADER_BUILD)
if(CONFIG_STACK_CHECK_NORM)
@@ -98,8 +121,6 @@ function(idf_set_global_compiler_options)
# go into the final binary so have no impact on size)
add_compile_options(-ggdb)

add_compile_options("-I${CMAKE_BINARY_DIR}") # for sdkconfig.h

# Enable ccache if it's on the path
if(NOT CCACHE_DISABLE)
find_program(CCACHE_FOUND ccache)
@@ -109,6 +130,8 @@ function(idf_set_global_compiler_options)
endif()
endif()

# Temporary trick to support both gcc5 and gcc8 builds
add_definitions(-DGCC_NOT_5_2_0=${GCC_NOT_5_2_0})
endfunction()

@@ -121,15 +144,17 @@ function(idf_verify_environment)

# Check toolchain is configured properly in cmake
if(NOT ( ${CMAKE_SYSTEM_NAME} STREQUAL "Generic" AND ${CMAKE_C_COMPILER} MATCHES xtensa))
message(FATAL_ERROR "Internal error, toolchain has not been set correctly by project")
message(FATAL_ERROR "Internal error, toolchain has not been set correctly by project "
"(or an invalid CMakeCache.txt file has been generated somehow)")
endif()

#
# Warn if the toolchain version doesn't match
#
# TODO: make these platform-specific for diff toolchains
#gcc_version_check("5.2.0")
#crosstool_version_check("1.22.0-80-g6c4433a")
get_expected_ctng_version(expected_toolchain expected_gcc)
gcc_version_check("${expected_gcc}")
crosstool_version_check("${expected_toolchain}")

endfunction()

@@ -141,8 +166,22 @@ endfunction()
function(idf_add_executable)
set(exe_target ${PROJECT_NAME}.elf)

spaces2list(MAIN_SRCS)
add_executable(${exe_target} "${MAIN_SRCS}")
if(MAIN_SRCS)
spaces2list(MAIN_SRCS)
add_executable(${exe_target} ${MAIN_SRCS})
else()
# Create a dummy file to work around CMake requirement of having a source
# file while adding an executable
add_executable(${exe_target} "${CMAKE_CURRENT_BINARY_DIR}/dummy_main_src.c")
add_custom_command(OUTPUT dummy_main_src.c
COMMAND ${CMAKE_COMMAND} -E touch dummy_main_src.c
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
VERBATIM)

add_custom_target(dummy_main_src DEPENDS ${CMAKE_CURRENT_BINARY_DIR}/dummy_main_src.c)

add_dependencies(${exe_target} dummy_main_src)
endif()

add_map_file(${exe_target})
endfunction()
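
[Editor's sketch, not part of the commit] The idf_add_executable() hunk above works around the fact that add_executable() needs at least one source file by generating an empty one at build time. A stripped-down version of the same trick (names hypothetical); note that linking such an executable still needs a main() supplied from a linked library, just as the IDF build pulls it in from component libraries:

# CMakeLists.txt -- "dummy source" workaround sketch, CMake >= 3.5
cmake_minimum_required(VERSION 3.5)
project(dummy_src_demo C)

# The executable is declared against a generated, initially empty source file
add_executable(app ${CMAKE_CURRENT_BINARY_DIR}/dummy_main_src.c)

# Generate the empty file at build time
add_custom_command(OUTPUT dummy_main_src.c
    COMMAND ${CMAKE_COMMAND} -E touch dummy_main_src.c
    WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
    VERBATIM)

# Make sure the file exists before the executable is compiled
add_custom_target(dummy_main_src DEPENDS ${CMAKE_CURRENT_BINARY_DIR}/dummy_main_src.c)
add_dependencies(app dummy_main_src)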
@@ -196,7 +235,11 @@ endfunction()
# Running git_describe() here automatically triggers rebuilds
# if the ESP-IDF git version changes
function(idf_get_git_revision)
git_describe(IDF_VER "${IDF_PATH}")
if(EXISTS "${IDF_PATH}/version.txt")
file(STRINGS "${IDF_PATH}/version.txt" IDF_VER)
else()
git_describe(IDF_VER "${IDF_PATH}")
endif()
add_definitions(-DIDF_VER=\"${IDF_VER}\")
git_submodule_check("${IDF_PATH}")
set(IDF_VER ${IDF_VER} PARENT_SCOPE)

@@ -1,27 +1,46 @@
include(ExternalProject)

macro(kconfig_set_variables)
set(CONFIG_DIR ${CMAKE_BINARY_DIR}/config)
set_default(SDKCONFIG ${PROJECT_PATH}/sdkconfig)
set(SDKCONFIG_HEADER ${CMAKE_BINARY_DIR}/sdkconfig.h)
set(SDKCONFIG_CMAKE ${CMAKE_BINARY_DIR}/sdkconfig.cmake)
set(SDKCONFIG_JSON ${CMAKE_BINARY_DIR}/sdkconfig.json)
set(SDKCONFIG_HEADER ${CONFIG_DIR}/sdkconfig.h)
set(SDKCONFIG_CMAKE ${CONFIG_DIR}/sdkconfig.cmake)
set(SDKCONFIG_JSON ${CONFIG_DIR}/sdkconfig.json)
set(KCONFIG_JSON_MENUS ${CONFIG_DIR}/kconfig_menus.json)

set(ROOT_KCONFIG ${IDF_PATH}/Kconfig)

set_default(SDKCONFIG_DEFAULTS "${SDKCONFIG}.defaults")

# ensure all source files can include sdkconfig.h
include_directories("${CONFIG_DIR}")
endmacro()

if(CMAKE_HOST_WIN32)
# Prefer a prebuilt mconf on Windows
find_program(WINPTY winpty)
find_program(MCONF mconf)
# Prefer a prebuilt mconf-idf on Windows
if(DEFINED ENV{MSYSTEM})
find_program(WINPTY winpty)
else()
unset(WINPTY CACHE) # in case previous CMake run was in a tty and this one is not
endif()
find_program(MCONF mconf-idf)

# Fall back to the old binary which was called 'mconf' not 'mconf-idf'
if(NOT MCONF)
find_program(MCONF mconf)
if(MCONF)
message(WARNING "Falling back to mconf binary '${MCONF}' not mconf-idf. "
"This is probably because an old version of IDF mconf is installed and this is fine. "
"However if there are config problems please check the Getting Started guide for your platform.")
endif()
endif()

if(NOT MCONF)
find_program(NATIVE_GCC gcc)
if(NOT NATIVE_GCC)
message(FATAL_ERROR
"Windows requires a prebuilt ESP-IDF-specific mconf for your platform "
"on the PATH, or an MSYS2 version of gcc on the PATH to build mconf. "
"Windows requires a prebuilt mconf-idf for your platform "
"on the PATH, or an MSYS2 version of gcc on the PATH to build mconf-idf. "
"Consult the setup docs for ESP-IDF on Windows.")
endif()
elseif(WINPTY)
@@ -32,29 +51,38 @@ endif()
if(NOT MCONF)
# Use the existing Makefile to build mconf (out of tree) when needed
#
set(MCONF kconfig_bin/mconf)
set(MCONF kconfig_bin/mconf-idf)

externalproject_add(mconf
externalproject_add(mconf-idf
SOURCE_DIR ${IDF_PATH}/tools/kconfig
CONFIGURE_COMMAND ""
BINARY_DIR "kconfig_bin"
BUILD_COMMAND make -f ${IDF_PATH}/tools/kconfig/Makefile mconf
BUILD_COMMAND make -f ${IDF_PATH}/tools/kconfig/Makefile mconf-idf
BUILD_BYPRODUCTS ${MCONF}
INSTALL_COMMAND ""
EXCLUDE_FROM_ALL 1
)
set(menuconfig_depends DEPENDS mconf)

file(GLOB mconf_srcfiles ${IDF_PATH}/tools/kconfig/*.c)
externalproject_add_stepdependencies(mconf-idf build
${mconf_srcfiles}
${IDF_PATH}/tools/kconfig/Makefile
${CMAKE_CURRENT_LIST_FILE})
unset(mconf_srcfiles)

set(menuconfig_depends DEPENDS mconf-idf)

endif()

# Find all Kconfig files for all components
function(kconfig_process_config)
file(MAKE_DIRECTORY "${CMAKE_BINARY_DIR}/include/config")
file(MAKE_DIRECTORY "${CONFIG_DIR}")
set(kconfigs)
set(kconfigs_projbuild)

# Find Kconfig and Kconfig.projbuild for each component as applicable
# if any of these change, cmake should rerun
foreach(dir ${BUILD_COMPONENT_PATHS} "${CMAKE_SOURCE_DIR}/main")
foreach(dir ${BUILD_COMPONENT_PATHS})
file(GLOB kconfig "${dir}/Kconfig")
if(kconfig)
set(kconfigs "${kconfigs} ${kconfig}")
@@ -81,11 +109,11 @@ function(kconfig_process_config)
--kconfig ${ROOT_KCONFIG}
--config ${SDKCONFIG}
${defaults_arg}
--create-config-if-missing
--env "COMPONENT_KCONFIGS=${kconfigs}"
--env "COMPONENT_KCONFIGS_PROJBUILD=${kconfigs_projbuild}")
--env "COMPONENT_KCONFIGS_PROJBUILD=${kconfigs_projbuild}"
--env "IDF_CMAKE=y")

# Generate the menuconfig target (uses C-based mconf tool, either prebuilt or via mconf target above)
# Generate the menuconfig target (uses C-based mconf-idf tool, either prebuilt or via mconf-idf target above)
add_custom_target(menuconfig
${menuconfig_depends}
# create any missing config file, with defaults if necessary
@@ -93,20 +121,44 @@ function(kconfig_process_config)
COMMAND ${CMAKE_COMMAND} -E env
"COMPONENT_KCONFIGS=${kconfigs}"
"COMPONENT_KCONFIGS_PROJBUILD=${kconfigs_projbuild}"
"IDF_CMAKE=y"
"KCONFIG_CONFIG=${SDKCONFIG}"
${MCONF} ${ROOT_KCONFIG}
VERBATIM
USES_TERMINAL)

# Custom target to run confserver.py from the build tool
add_custom_target(confserver
COMMAND ${CMAKE_COMMAND} -E env
"COMPONENT_KCONFIGS=${kconfigs}"
"COMPONENT_KCONFIGS_PROJBUILD=${kconfigs_projbuild}"
${PYTHON} ${IDF_PATH}/tools/kconfig_new/confserver.py
--kconfig ${IDF_PATH}/Kconfig --config ${SDKCONFIG}
VERBATIM
USES_TERMINAL)

# Generate configuration output via confgen.py
# makes sdkconfig.h and skdconfig.cmake
#
# This happens during the cmake run not during the build
execute_process(COMMAND ${confgen_basecommand}
--output header ${SDKCONFIG_HEADER}
--output cmake ${SDKCONFIG_CMAKE}
--output json ${SDKCONFIG_JSON}
RESULT_VARIABLE config_result)
if(NOT BOOTLOADER_BUILD)
execute_process(
COMMAND ${confgen_basecommand}
--output header ${SDKCONFIG_HEADER}
--output cmake ${SDKCONFIG_CMAKE}
--output json ${SDKCONFIG_JSON}
--output json_menus ${KCONFIG_JSON_MENUS}
--output config ${SDKCONFIG} # only generate config at the top-level project
RESULT_VARIABLE config_result)
else()
execute_process(
COMMAND ${confgen_basecommand}
--output header ${SDKCONFIG_HEADER}
--output cmake ${SDKCONFIG_CMAKE}
--output json ${SDKCONFIG_JSON}
--output json_menus ${KCONFIG_JSON_MENUS}
RESULT_VARIABLE config_result)
endif()
if(config_result)
message(FATAL_ERROR "Failed to run confgen.py (${confgen_basecommand}). Error ${config_result}")
endif()

@@ -6,11 +6,12 @@ cmake_minimum_required(VERSION 3.5)
set(IDF_PATH "$ENV{IDF_PATH}")
if(NOT IDF_PATH)
# Documentation says you should set IDF_PATH in your environment, but we
# can infer it here if it's not set.
set(IDF_PATH ${CMAKE_CURRENT_LIST_DIR})
# can infer it relative to tools/cmake directory if it's not set.
get_filename_component(IDF_PATH "${CMAKE_CURRENT_LIST_DIR}/../.." ABSOLUTE)
endif()
file(TO_CMAKE_PATH "${IDF_PATH}" IDF_PATH)
set($ENV{IDF_PATH} "${IDF_PATH}")
set(ENV{IDF_PATH} ${IDF_PATH})

#
# Load cmake modules
@@ -28,6 +29,15 @@ include(idf_functions)

set_default(PYTHON "python")

if(NOT PYTHON_DEPS_CHECKED AND NOT BOOTLOADER_BUILD)
message(STATUS "Checking Python dependencies...")
execute_process(COMMAND "${PYTHON}" "${IDF_PATH}/tools/check_python_dependencies.py"
RESULT_VARIABLE result)
if(NOT result EQUAL 0)
message(FATAL_ERROR "Some Python dependencies must be installed. Check above message for details.")
endif()
endif()

# project
#
# This macro wraps the cmake 'project' command to add
@@ -46,20 +56,24 @@ macro(project name)
# Set global variables used by rest of the build
idf_set_global_variables()

# Establish dependencies for components in the build
# (this happens before we even generate config...)
if(COMPONENTS)
# Make sure if an explicit list of COMPONENTS is given, it contains the "common" component requirements
# (otherwise, if COMPONENTS is empty then all components will be included in the build.)
set(COMPONENTS "${COMPONENTS} ${COMPONENT_REQUIRES_COMMON}")
endif()
# Sort the components list, as it may be found via filesystem
# traversal and therefore in a non-deterministic order
list(SORT COMPONENTS)

execute_process(COMMAND "${CMAKE_COMMAND}"
-D "COMPONENTS=${COMPONENTS}"
-D "COMPONENT_REQUIRES_COMMON=${COMPONENT_REQUIRES_COMMON}"
-D "EXCLUDE_COMPONENTS=${EXCLUDE_COMPONENTS}"
-D "TEST_COMPONENTS=${TEST_COMPONENTS}"
-D "TEST_EXCLUDE_COMPONENTS=${TEST_EXCLUDE_COMPONENTS}"
-D "TESTS_ALL=${TESTS_ALL}"
-D "DEPENDENCIES_FILE=${CMAKE_BINARY_DIR}/component_depends.cmake"
-D "COMPONENT_DIRS=${COMPONENT_DIRS}"
-D "BOOTLOADER_BUILD=${BOOTLOADER_BUILD}"
-D "IDF_PATH=${IDF_PATH}"
-D "DEBUG=${DEBUG}"
-P "${IDF_PATH}/tools/cmake/scripts/expand_requirements.cmake"
WORKING_DIRECTORY "${IDF_PATH}/tools/cmake")
WORKING_DIRECTORY "${PROJECT_PATH}")
include("${CMAKE_BINARY_DIR}/component_depends.cmake")

# We now have the following component-related variables:
@@ -73,6 +87,14 @@ macro(project name)
unset(BUILD_COMPONENTS_SPACES)
message(STATUS "Component paths: ${BUILD_COMPONENT_PATHS}")

# Print list of test components
if(TESTS_ALL EQUAL 1 OR TEST_COMPONENTS)
string(REPLACE ";" " " BUILD_TEST_COMPONENTS_SPACES "${BUILD_TEST_COMPONENTS}")
message(STATUS "Test component names: ${BUILD_TEST_COMPONENTS_SPACES}")
unset(BUILD_TEST_COMPONENTS_SPACES)
message(STATUS "Test component paths: ${BUILD_TEST_COMPONENT_PATHS}")
endif()

kconfig_set_variables()

kconfig_process_config()
@@ -83,7 +105,6 @@ macro(project name)
# Now the configuration is loaded, set the toolchain appropriately
#
# TODO: support more toolchains than just ESP32
#set(CMAKE_TOOLCHAIN_FILE $ENV{IDF_PATH}/tools/cmake/toolchain-esp32.cmake)
set(CMAKE_TOOLCHAIN_FILE $ENV{IDF_PATH}/tools/cmake/toolchain-esp8266.cmake)

# Declare the actual cmake-level project
@@ -106,14 +127,24 @@ macro(project name)

# Include any top-level project_include.cmake files from components
foreach(component ${BUILD_COMPONENT_PATHS})
set(COMPONENT_PATH "${component}")
include_if_exists("${component}/project_include.cmake")
unset(COMPONENT_PATH)
endforeach()

#
# Add each component to the build as a library
#
foreach(COMPONENT_PATH ${BUILD_COMPONENT_PATHS})
get_filename_component(COMPONENT_NAME ${COMPONENT_PATH} NAME)
list(FIND BUILD_TEST_COMPONENT_PATHS ${COMPONENT_PATH} idx)

if(NOT idx EQUAL -1)
list(GET BUILD_TEST_COMPONENTS ${idx} test_component)
set(COMPONENT_NAME ${test_component})
else()
get_filename_component(COMPONENT_NAME ${COMPONENT_PATH} NAME)
endif()

add_subdirectory(${COMPONENT_PATH} ${COMPONENT_NAME})
endforeach()
unset(COMPONENT_NAME)

@@ -15,18 +15,11 @@ if [ -z "${IDF_PATH}" ]; then
exit 3
fi

# exclusions include some third-party directories which contain upstream
# CMakeLists files
find ${IDF_PATH} \
-name build -prune \
-o -name third_party -prune \
\
-o -name 'nghttp2' -prune \
-o -name 'cJSON' -prune \
-o -name 'Findsodium.cmake' -prune \
\
-o -name CMakeLists.txt -print0 \
-o -name '*.cmake' -print0 \
| xargs -0 cmakelint --linelength=120 --spaces=4
cd "$IDF_PATH"

# Only list the "main" IDF repo, don't check any files in submodules (which may contain
# third party CMakeLists.txt)
git ls-tree --full-tree --name-only -r HEAD | grep -v "/third_party/" | grep "^CMakeLists.txt$\|\.cmake$" \
| xargs cmakelint --linelength=120 --spaces=4

@@ -67,6 +67,7 @@ endif()
append_line(" */")

append_line(".data")
append_line(".section .rodata.embedded")
append_identifier("${varname}")
append_identifier("_binary_${varname}_start" "for objcopy compatibility")
append("${data}")

@@ -4,6 +4,8 @@
# Parameters:
# - COMPONENTS = Space-separated list of initial components to include in the build.
# Can be empty, in which case all components are in the build.
# - COMPONENT_REQUIRES_COMMON = Components to always include in the build, and treated as dependencies
# of all other components.
# - DEPENDENCIES_FILE = Path of generated cmake file which will contain the expanded dependencies for these
# components.
# - COMPONENT_DIRS = List of paths to search for all components.
@@ -13,9 +15,22 @@
# components required for the build, and the get_component_requirements() function to return each component's
# recursively expanded requirements.
#
# BUILD_COMPONENTS & BUILD_COMPONENT_PATHS will be ordered in a best-effort way so that dependencies are listed first.
# (Note that IDF supports cyclic dependencies, and dependencies in a cycle have ordering guarantees.)
#
# Determinism:
#
# Given the the same list of names in COMPONENTS (regardless of order), and an identical value of
# COMPONENT_REQUIRES_COMMON, and all the same COMPONENT_REQUIRES & COMPONENT_PRIV_REQUIRES values in
# each component, then the output of BUILD_COMPONENTS should always be in the same
# order.
#
# BUILD_COMPONENT_PATHS will be in the same component order as BUILD_COMPONENTS, even if the
# actual component paths are different due to different paths.
#
# TODO: Error out if a component requirement is missing
cmake_minimum_required(VERSION 3.5)
include("utilities.cmake")
include("${IDF_PATH}/tools/cmake/utilities.cmake")

if(NOT DEPENDENCIES_FILE)
message(FATAL_ERROR "DEPENDENCIES_FILE must be set.")
@@ -26,6 +41,8 @@ if(NOT COMPONENT_DIRS)
endif()
spaces2list(COMPONENT_DIRS)

spaces2list(COMPONENT_REQUIRES_COMMON)

function(debug message)
if(DEBUG)
message(STATUS "${message}")
@@ -37,8 +54,16 @@ endfunction()
# (expand_component_requirements() includes the component CMakeLists.txt, which then sets its component variables,
# calls this dummy macro, and immediately exits again.)
macro(register_component)
spaces2list(COMPONENT_REQUIRES)
set_property(GLOBAL PROPERTY "${COMPONENT}_REQUIRES" "${COMPONENT_REQUIRES}")
if(COMPONENT STREQUAL main AND NOT COMPONENT_REQUIRES)
set(main_component_requires ${COMPONENTS})
list(REMOVE_ITEM main_component_requires "main")

set_property(GLOBAL PROPERTY "${COMPONENT}_REQUIRES" "${main_component_requires}")
else()
spaces2list(COMPONENT_REQUIRES)
set_property(GLOBAL PROPERTY "${COMPONENT}_REQUIRES" "${COMPONENT_REQUIRES}")
endif()

spaces2list(COMPONENT_PRIV_REQUIRES)
set_property(GLOBAL PROPERTY "${COMPONENT}_PRIV_REQUIRES" "${COMPONENT_PRIV_REQUIRES}")

@@ -57,14 +82,14 @@ endmacro()
# return the path to the component in 'variable'
#
# Fatal error is printed if the component is not found.
function(find_component_path find_name component_paths variable)
foreach(path ${component_paths})
get_filename_component(name "${path}" NAME)
if("${name}" STREQUAL "${find_name}")
set("${variable}" "${path}" PARENT_SCOPE)
return()
endif()
endforeach()
function(find_component_path find_name components component_paths variable)
list(FIND components ${find_name} idx)
if(NOT idx EQUAL -1)
list(GET component_paths ${idx} path)
set("${variable}" "${path}" PARENT_SCOPE)
return()
else()
endif()
# TODO: find a way to print the dependency chain that lead to this not-found component
message(WARNING "Required component ${find_name} is not found in any of the provided COMPONENT_DIRS")
endfunction()
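
[Editor's sketch, not part of the commit] The rewritten find_component_path() above switches from re-deriving names out of paths to a parallel-list lookup: the index of the name in one list selects the path at the same index in the other. A self-contained version with hypothetical names and paths; run with `cmake -P find_by_index_demo.cmake`.

# find_by_index_demo.cmake -- run with: cmake -P find_by_index_demo.cmake
set(components esp8266 freertos newlib)            # hypothetical component names
set(component_paths /idf/a /idf/b /idf/c)          # hypothetical paths, same order

function(find_component_path find_name components component_paths variable)
    list(FIND components ${find_name} idx)         # index of the name, or -1
    if(NOT idx EQUAL -1)
        list(GET component_paths ${idx} path)      # path at the same index
        set("${variable}" "${path}" PARENT_SCOPE)
        return()
    endif()
    message(WARNING "Component ${find_name} not found")
endfunction()

find_component_path(freertos "${components}" "${component_paths}" result)
message(STATUS "freertos -> ${result}")            # expected: /idf/b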

@@ -75,10 +100,11 @@ endfunction()

#
# component_paths contains only unique component names. Directories
# earlier in the component_dirs list take precedence.
function(components_find_all component_dirs component_paths component_names)
function(components_find_all component_dirs component_paths component_names test_component_names)
# component_dirs entries can be files or lists of files
set(paths "")
set(names "")
set(test_names "")

# start by expanding the component_dirs list with all subdirectories
foreach(dir ${component_dirs})
@@ -91,15 +117,22 @@ function(components_find_all component_dirs component_paths component_names)

# Look for a component in each component_dirs entry
foreach(dir ${component_dirs})
debug("Looking for CMakeLists.txt in ${dir}")
file(GLOB component "${dir}/CMakeLists.txt")
if(component)
debug("CMakeLists.txt file ${component}")
get_filename_component(component "${component}" DIRECTORY)
get_filename_component(name "${component}" NAME)
if(NOT name IN_LIST names)
set(names "${names};${name}")
set(paths "${paths};${component}")
endif()
list(APPEND names "${name}")
list(APPEND paths "${component}")

# Look for test component directory
file(GLOB test "${component}/test/CMakeLists.txt")
if(test)
list(APPEND test_names "${name}")
endif()
endif()
else() # no CMakeLists.txt file
# test for legacy component.mk and warn
file(GLOB legacy_component "${dir}/component.mk")
@@ -109,26 +142,28 @@ function(components_find_all component_dirs component_paths component_names)
"Component will be skipped.")
endif()
endif()

endforeach()

set(${component_paths} ${paths} PARENT_SCOPE)
set(${component_names} ${names} PARENT_SCOPE)
set(${test_component_names} ${test_names} PARENT_SCOPE)
endfunction()

# expand_component_requirements: Recursively expand a component's requirements,
# setting global properties BUILD_COMPONENTS & BUILD_COMPONENT_PATHS and
# also invoking the components to call register_component() above,
# which will add per-component global properties with dependencies, etc.
function(expand_component_requirements component)
get_property(build_components GLOBAL PROPERTY BUILD_COMPONENTS)
if(${component} IN_LIST build_components)
return() # already added this component
get_property(seen_components GLOBAL PROPERTY SEEN_COMPONENTS)
if(component IN_LIST seen_components)
return() # already added, or in process of adding, this component
endif()
set_property(GLOBAL APPEND PROPERTY SEEN_COMPONENTS ${component})

find_component_path("${component}" "${ALL_COMPONENT_PATHS}" component_path)
debug("Expanding dependencies of ${component} @ ${component_path}")
if(NOT component_path)
find_component_path("${component}" "${ALL_COMPONENTS}" "${ALL_COMPONENT_PATHS}" COMPONENT_PATH)
debug("Expanding dependencies of ${component} @ ${COMPONENT_PATH}")
if(NOT COMPONENT_PATH)
set_property(GLOBAL APPEND PROPERTY COMPONENTS_NOT_FOUND ${component})
return()
endif()
@@ -138,43 +173,124 @@ function(expand_component_requirements component)
unset(COMPONENT_REQUIRES)
unset(COMPONENT_PRIV_REQUIRES)
set(COMPONENT ${component})
include(${component_path}/CMakeLists.txt)

set_property(GLOBAL APPEND PROPERTY BUILD_COMPONENT_PATHS ${component_path})
set_property(GLOBAL APPEND PROPERTY BUILD_COMPONENTS ${component})
include(${COMPONENT_PATH}/CMakeLists.txt)

get_property(requires GLOBAL PROPERTY "${component}_REQUIRES")
get_property(requires_priv GLOBAL PROPERTY "${component}_PRIV_REQUIRES")
foreach(req ${requires} ${requires_priv})

# Recurse dependencies first, so that they appear first in the list (when possible)
foreach(req ${COMPONENT_REQUIRES_COMMON} ${requires} ${requires_priv})
expand_component_requirements(${req})
endforeach()

list(FIND TEST_COMPONENTS ${component} idx)

if(NOT idx EQUAL -1)
list(GET TEST_COMPONENTS ${idx} test_component)
list(GET TEST_COMPONENT_PATHS ${idx} test_component_path)
set_property(GLOBAL APPEND PROPERTY BUILD_TEST_COMPONENTS ${test_component})
set_property(GLOBAL APPEND PROPERTY BUILD_TEST_COMPONENT_PATHS ${test_component_path})
endif()

# Now append this component to the full list (after its dependencies)
set_property(GLOBAL APPEND PROPERTY BUILD_COMPONENT_PATHS ${COMPONENT_PATH})
set_property(GLOBAL APPEND PROPERTY BUILD_COMPONENTS ${component})
endfunction()

# filter_components_list: Filter the components included in the build
# as specified by the user. Or, in the case of unit testing, filter out
# the test components to be built.
macro(filter_components_list)
spaces2list(COMPONENTS)
spaces2list(EXCLUDE_COMPONENTS)
spaces2list(TEST_COMPONENTS)
spaces2list(TEST_EXCLUDE_COMPONENTS)

list(LENGTH ALL_COMPONENTS all_components_length)
math(EXPR all_components_length "${all_components_length} - 1")

foreach(component_idx RANGE 0 ${all_components_length})
list(GET ALL_COMPONENTS ${component_idx} component)
list(GET ALL_COMPONENT_PATHS ${component_idx} component_path)

if(COMPONENTS)
if(${component} IN_LIST COMPONENTS)
set(add_component 1)
else()
set(add_component 0)
endif()
else()
set(add_component 1)

endif()

if(NOT ${component} IN_LIST EXCLUDE_COMPONENTS AND add_component EQUAL 1)
list(APPEND components ${component})
list(APPEND component_paths ${component_path})

if(TESTS_ALL EQUAL 1 OR TEST_COMPONENTS)
if(NOT TESTS_ALL EQUAL 1 AND TEST_COMPONENTS)
if(${component} IN_LIST TEST_COMPONENTS)
set(add_test_component 1)
else()
set(add_test_component 0)
endif()
else()
set(add_test_component 1)
endif()

if(${component} IN_LIST ALL_TEST_COMPONENTS)
if(NOT ${component} IN_LIST TEST_EXCLUDE_COMPONENTS AND add_test_component EQUAL 1)
list(APPEND test_components ${component}_test)
list(APPEND test_component_paths ${component_path}/test)

list(APPEND components ${component}_test)
list(APPEND component_paths ${component_path}/test)
endif()
endif()
endif()
endif()
endforeach()

set(COMPONENTS ${components})

set(TEST_COMPONENTS ${test_components})
set(TEST_COMPONENT_PATHS ${test_component_paths})

list(APPEND ALL_COMPONENTS "${TEST_COMPONENTS}")
list(APPEND ALL_COMPONENT_PATHS "${TEST_COMPONENT_PATHS}")
endmacro()

# Main functionality goes here

# Find every available component in COMPONENT_DIRS, save as ALL_COMPONENT_PATHS and ALL_COMPONENTS
components_find_all("${COMPONENT_DIRS}" ALL_COMPONENT_PATHS ALL_COMPONENTS)
components_find_all("${COMPONENT_DIRS}" ALL_COMPONENT_PATHS ALL_COMPONENTS ALL_TEST_COMPONENTS)

if(NOT COMPONENTS)
set(COMPONENTS "${ALL_COMPONENTS}")
endif()
spaces2list(COMPONENTS)
filter_components_list()

debug("ALL_COMPONENT_PATHS ${ALL_COMPONENT_PATHS}")
debug("ALL_COMPONENTS ${ALL_COMPONENTS}")
debug("ALL_TEST_COMPONENTS ${ALL_TEST_COMPONENTS}")

set_property(GLOBAL PROPERTY SEEN_COMPONENTS "") # anti-infinite-recursion
set_property(GLOBAL PROPERTY BUILD_COMPONENTS "")
set_property(GLOBAL PROPERTY BUILD_COMPONENT_PATHS "")
set_property(GLOBAL PROPERTY BUILD_TEST_COMPONENTS "")
set_property(GLOBAL PROPERTY BUILD_TEST_COMPONENT_PATHS "")
set_property(GLOBAL PROPERTY COMPONENTS_NOT_FOUND "")

# Indicate that the component CMakeLists.txt is being included in the early expansion phase of the build,
# and might not want to execute particular operations.
set(CMAKE_BUILD_EARLY_EXPANSION 1)
foreach(component ${COMPONENTS})
debug("Expanding initial component ${component}")
expand_component_requirements(${component})
endforeach()
unset(CMAKE_BUILD_EARLY_EXPANSION)

get_property(build_components GLOBAL PROPERTY BUILD_COMPONENTS)
get_property(build_component_paths GLOBAL PROPERTY BUILD_COMPONENT_PATHS)
get_property(build_test_components GLOBAL PROPERTY BUILD_TEST_COMPONENTS)
get_property(build_test_component_paths GLOBAL PROPERTY BUILD_TEST_COMPONENT_PATHS)
get_property(not_found GLOBAL PROPERTY COMPONENTS_NOT_FOUND)

debug("components in build: ${build_components}")
@@ -182,12 +298,14 @@ debug("components in build: ${build_component_paths}")
debug("components not found: ${not_found}")

function(line contents)
file(APPEND "${DEPENDENCIES_FILE}" "${contents}\n")
file(APPEND "${DEPENDENCIES_FILE}.tmp" "${contents}\n")
endfunction()

file(WRITE "${DEPENDENCIES_FILE}" "# Component requirements generated by expand_requirements.cmake\n\n")
file(WRITE "${DEPENDENCIES_FILE}.tmp" "# Component requirements generated by expand_requirements.cmake\n\n")
line("set(BUILD_COMPONENTS ${build_components})")
line("set(BUILD_COMPONENT_PATHS ${build_component_paths})")
line("set(BUILD_TEST_COMPONENTS ${build_test_components})")
line("set(BUILD_TEST_COMPONENT_PATHS ${build_test_component_paths})")
line("")

line("# get_component_requirements: Generated function to read the dependencies of a given component.")
@@ -214,3 +332,7 @@ endforeach()

line(" message(FATAL_ERROR \"Component not found: \${component}\")")
line("endfunction()")

# only replace DEPENDENCIES_FILE if it has changed (prevents ninja/make build loops.)
execute_process(COMMAND ${CMAKE_COMMAND} -E copy_if_different "${DEPENDENCIES_FILE}.tmp" "${DEPENDENCIES_FILE}")
execute_process(COMMAND ${CMAKE_COMMAND} -E remove "${DEPENDENCIES_FILE}.tmp")
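
[Editor's sketch, not part of the commit] The write-to-.tmp-then-copy_if_different pattern added above only touches the real output file when its contents actually change, so ninja/make does not keep re-running steps that depend on it. A tiny standalone version (output file name hypothetical); running it twice leaves the output's timestamp untouched the second time. Run with `cmake -P copy_if_different_demo.cmake`.

# copy_if_different_demo.cmake -- run with: cmake -P copy_if_different_demo.cmake
set(out ${CMAKE_CURRENT_LIST_DIR}/generated.cmake)   # hypothetical generated file

# write the new contents to a temporary file first
file(WRITE "${out}.tmp" "set(GENERATED_VALUE 42)\n")

# copy over the real file only if the contents differ, then drop the temp file
execute_process(COMMAND ${CMAKE_COMMAND} -E copy_if_different "${out}.tmp" "${out}")
execute_process(COMMAND ${CMAKE_COMMAND} -E remove "${out}.tmp")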

tools/cmake/scripts/fail.cmake (new file, 4 lines)
@@ -0,0 +1,4 @@
# 'cmake -E' doesn't have a way to fail outright, so run this script
# with 'cmake -P' to fail a build.
message(FATAL_ERROR "Failing the build (see errors on lines above)")

@@ -109,7 +109,9 @@ function(git_describe _var _repo_dir)

execute_process(COMMAND
"${GIT_EXECUTABLE}"
describe
"-C"
${_repo_dir}
describe --tag
${hash}
${ARGN}
WORKING_DIRECTORY

@@ -1,7 +0,0 @@
set(CMAKE_SYSTEM_NAME Generic)

set(CMAKE_C_COMPILER xtensa-esp32-elf-gcc)
set(CMAKE_CXX_COMPILER xtensa-esp32-elf-g++)
set(CMAKE_ASM_COMPILER xtensa-esp32-elf-gcc)

set(CMAKE_EXE_LINKER_FLAGS "-nostdlib" CACHE STRING "Linker Base Flags")

@@ -3,6 +3,5 @@ set(CMAKE_SYSTEM_NAME Generic)
set(CMAKE_C_COMPILER xtensa-lx106-elf-gcc)
set(CMAKE_CXX_COMPILER xtensa-lx106-elf-g++)
set(CMAKE_ASM_COMPILER xtensa-lx106-elf-gcc)
set(CMAKE_OBJCOPY_COMPILER xtensa-lx106-elf-objcopy)

set(CMAKE_EXE_LINKER_FLAGS "-nostdlib" CACHE STRING "Linker Base Flags")

@@ -9,7 +9,7 @@
#
function(set_default variable default_value)
if(NOT ${variable})
if($ENV{${variable}})
if(DEFINED ENV{${variable}} AND NOT "$ENV{${variable}}" STREQUAL "")
set(${variable} $ENV{${variable}} PARENT_SCOPE)
else()
set(${variable} ${default_value} PARENT_SCOPE)
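
[Editor's sketch, not part of the commit] The set_default() fix above matters because if($ENV{VAR}) evaluates the expanded text as an if() constant or variable name, so most real values (paths, program names, "0", "OFF") come out false; testing DEFINED ENV{VAR} plus non-empty is what actually detects "the user set something". A top-level version of the corrected check (variable name and fallback are hypothetical); run with `cmake -P set_default_demo.cmake`.

# set_default_demo.cmake -- run with: cmake -P set_default_demo.cmake
set(variable DEMO_SETTING)   # hypothetical variable/environment name

if(DEFINED ENV{${variable}} AND NOT "$ENV{${variable}}" STREQUAL "")
    set(${variable} "$ENV{${variable}}")     # take the value from the environment
else()
    set(${variable} "fallback-value")        # otherwise use the default
endif()
message(STATUS "${variable} = ${${variable}}")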
|
||||
@ -31,7 +31,6 @@ function(spaces2list variable_name)
|
||||
set("${variable_name}" "${tmp}" PARENT_SCOPE)
|
||||
endfunction()
|
||||
|
||||
|
||||
# lines2list
|
||||
#
|
||||
# Take a variable with multiple lines of output in it, convert it
|
||||
@ -74,30 +73,6 @@ function(move_if_different source destination)
|
||||
|
||||
endfunction()
|
||||
|
||||
|
||||
# add_compile_options variant for C++ code only
|
||||
#
|
||||
# This adds global options, set target properties for
|
||||
# component-specific flags
|
||||
function(add_cxx_compile_options)
|
||||
foreach(option ${ARGV})
|
||||
# note: the Visual Studio Generator doesn't support this...
|
||||
add_compile_options($<$<COMPILE_LANGUAGE:CXX>:${option}>)
|
||||
endforeach()
|
||||
endfunction()
|
||||
|
||||
# add_compile_options variant for C code only
|
||||
#
|
||||
# This adds global options, set target properties for
|
||||
# component-specific flags
|
||||
function(add_c_compile_options)
|
||||
foreach(option ${ARGV})
|
||||
# note: the Visual Studio Generator doesn't support this...
|
||||
add_compile_options($<$<COMPILE_LANGUAGE:C>:${option}>)
|
||||
endforeach()
|
||||
endfunction()
|
||||
|
||||
|
||||
# target_add_binary_data adds binary data into the built target,
|
||||
# by converting it to a generated source file which is then compiled
|
||||
# to a binary object as part of the build
|
||||
@ -132,7 +107,7 @@ endmacro()
|
||||
# Append a single line to the file specified
|
||||
# The line ending is determined by the host OS
|
||||
function(file_append_line file line)
|
||||
if(ENV{MSYSTEM} OR CMAKE_HOST_WIN32)
|
||||
if(DEFINED ENV{MSYSTEM} OR CMAKE_HOST_WIN32)
|
||||
set(line_ending "\r\n")
|
||||
else() # unix
|
||||
set(line_ending "\n")
|
||||
@ -179,3 +154,14 @@ function(make_json_list list variable)
|
||||
string(REPLACE ";" "\", \"" result "[ \"${list}\" ]")
|
||||
set("${variable}" "${result}" PARENT_SCOPE)
|
||||
endfunction()
|
||||
|
||||
# add_prefix
|
||||
#
|
||||
# Adds a prefix to each item in the specified list.
|
||||
#
|
||||
function(add_prefix var prefix)
|
||||
foreach(elm ${ARGN})
|
||||
list(APPEND newlist "${prefix}${elm}")
|
||||
endforeach()
|
||||
set(${var} "${newlist}" PARENT_SCOPE)
|
||||
endfunction()
|
224
tools/idf.py
224
tools/idf.py
@ -21,6 +21,11 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
|
||||
# WARNING: we don't check for Python build-time dependencies until
|
||||
# check_environment() function below. If possible, avoid importing
|
||||
# any external libraries here - put in external script, or import in
|
||||
# their specific function instead.
|
||||
import sys
|
||||
import argparse
|
||||
import os
|
||||
@ -41,7 +46,7 @@ class FatalError(RuntimeError):
|
||||
PYTHON=sys.executable
|
||||
|
||||
# note: os.environ changes don't automatically propagate to child processes,
|
||||
# you have to pass this in explicitly
|
||||
# you have to pass env=os.environ explicitly anywhere that we create a process
|
||||
os.environ["PYTHON"]=sys.executable
|
||||
|
||||
# Make flavors, across the various kinds of Windows environments & POSIX...
|
||||
@ -71,14 +76,13 @@ def _run_tool(tool_name, args, cwd):
|
||||
return arg
|
||||
display_args = " ".join(quote_arg(arg) for arg in args)
|
||||
print("Running %s in directory %s" % (tool_name, quote_arg(cwd)))
|
||||
print('Executing "%s"...' % display_args)
|
||||
print('Executing "%s"...' % str(display_args))
|
||||
try:
|
||||
# Note: we explicitly pass in os.environ here, as we may have set IDF_PATH there during startup
|
||||
subprocess.check_call(args, env=os.environ, cwd=cwd)
|
||||
except subprocess.CalledProcessError as e:
|
||||
raise FatalError("%s failed with exit code %d" % (tool_name, e.returncode))
|
||||
|
||||
|
||||
def check_environment():
|
||||
"""
|
||||
Verify the environment contains the top-level tools we need to operate
|
||||
@ -95,8 +99,18 @@ def check_environment():
|
||||
print("WARNING: IDF_PATH environment variable is set to %s but idf.py path indicates IDF directory %s. Using the environment variable directory, but results may be unexpected..."
|
||||
% (set_idf_path, detected_idf_path))
|
||||
else:
|
||||
print("Setting IDF_PATH environment variable: %s" % detected_idf_path)
|
||||
os.environ["IDF_PATH"] = detected_idf_path
|
||||
|
||||
# check Python dependencies
|
||||
print("Checking Python dependencies...")
|
||||
try:
|
||||
subprocess.check_call([ os.environ["PYTHON"],
|
||||
os.path.join(os.environ["IDF_PATH"], "tools", "check_python_dependencies.py")],
|
||||
env=os.environ)
|
||||
except subprocess.CalledProcessError:
|
||||
raise SystemExit(1)
|
||||
|
||||
def executable_exists(args):
|
||||
try:
|
||||
subprocess.check_output(args)
|
||||
@ -136,18 +150,21 @@ def _ensure_build_directory(args, always_run_cmake=False):
|
||||
# Verify/create the build directory
|
||||
build_dir = args.build_dir
|
||||
if not os.path.isdir(build_dir):
|
||||
os.mkdir(build_dir)
|
||||
os.makedirs(build_dir)
|
||||
cache_path = os.path.join(build_dir, "CMakeCache.txt")
|
||||
if not os.path.exists(cache_path) or always_run_cmake:
|
||||
if args.generator is None:
|
||||
args.generator = detect_cmake_generator()
|
||||
try:
|
||||
cmake_args = ["cmake", "-G", args.generator]
|
||||
cmake_args = ["cmake", "-G", args.generator, "-DPYTHON_DEPS_CHECKED=1"]
|
||||
if not args.no_warnings:
|
||||
cmake_args += [ "--warn-uninitialized" ]
|
||||
if args.no_ccache:
|
||||
cmake_args += [ "-DCCACHE_DISABLE=1" ]
|
||||
if args.define_cache_entry:
|
||||
cmake_args += ["-D" + d for d in args.define_cache_entry]
|
||||
cmake_args += [ project_dir]
|
||||
|
||||
_run_tool("cmake", cmake_args, cwd=args.build_dir)
|
||||
except:
|
||||
# don't allow partially valid CMakeCache.txt files,
|
||||
@ -204,6 +221,7 @@ def build_target(target_name, args):
|
||||
"""
|
||||
_ensure_build_directory(args)
|
||||
generator_cmd = GENERATOR_CMDS[args.generator]
|
||||
|
||||
if not args.no_ccache:
|
||||
# Setting CCACHE_BASEDIR & CCACHE_NO_HASHDIR ensures that project paths aren't stored in the ccache entries
|
||||
# (this means ccache hits can be shared between different projects. It may mean that some debug information
|
||||
@ -221,9 +239,10 @@ def build_target(target_name, args):
|
||||
|
||||
def _get_esptool_args(args):
|
||||
esptool_path = os.path.join(os.environ["IDF_PATH"], "components/esptool_py/esptool/esptool.py")
|
||||
if args.port is None:
|
||||
args.port = get_default_serial_port()
|
||||
result = [ PYTHON, esptool_path ]
|
||||
if args.port is not None:
|
||||
result += [ "-p", args.port ]
|
||||
result += [ "-p", args.port ]
|
||||
result += [ "-b", str(args.baud) ]
|
||||
return result
|
||||
|
||||
@ -241,17 +260,17 @@ def flash(action, args):
|
||||
esptool_args += [ "write_flash", "@"+flasher_args_path ]
|
||||
_run_tool("esptool.py", esptool_args, args.build_dir)
|
||||
|
||||
|
||||
def erase_flash(action, args):
|
||||
esptool_args = _get_esptool_args(args)
|
||||
esptool_args += [ "erase_flash" ]
|
||||
_run_tool("esptool.py", esptool_args, args.build_dir)
|
||||
|
||||
|
||||
def monitor(action, args):
|
||||
"""
|
||||
Run idf_monitor.py to watch build output
|
||||
"""
|
||||
if args.port is None:
|
||||
args.port = get_default_serial_port()
|
||||
desc_path = os.path.join(args.build_dir, "project_description.json")
|
||||
if not os.path.exists(desc_path):
|
||||
_ensure_build_directory(args)
|
||||
@ -267,9 +286,13 @@ def monitor(action, args):
|
||||
monitor_args += [ "-p", args.port ]
|
||||
monitor_args += [ "-b", project_desc["monitor_baud"] ]
|
||||
monitor_args += [ elf_file ]
|
||||
if "MSYSTEM" is os.environ:
|
||||
|
||||
idf_py = [ PYTHON ] + get_commandline_options() # commands to re-run idf.py
|
||||
monitor_args += [ "-m", " ".join("'%s'" % a for a in idf_py) ]
|
||||
|
||||
if "MSYSTEM" in os.environ:
|
||||
monitor_args = [ "winpty" ] + monitor_args
|
||||
_run_tool("idf_monitor", monitor_args, args.build_dir)
|
||||
_run_tool("idf_monitor", monitor_args, args.project_dir)
|
||||
|
||||
|
||||
def clean(action, args):
|
||||
@ -305,43 +328,157 @@ def fullclean(action, args):
|
||||
else:
|
||||
os.remove(f)
|
||||
|
||||
def print_closing_message(args):
|
||||
# print a closing message of some kind
|
||||
#
|
||||
if "flash" in str(args.actions):
|
||||
print("Done")
|
||||
return
|
||||
|
||||
# Otherwise, if we built any binaries print a message about
|
||||
# how to flash them
|
||||
def print_flashing_message(title, key):
|
||||
print("\n%s build complete. To flash, run this command:" % title)
|
||||
|
||||
with open(os.path.join(args.build_dir, "flasher_args.json")) as f:
|
||||
flasher_args = json.load(f)
|
||||
|
||||
def flasher_path(f):
|
||||
            return os.path.relpath(os.path.join(args.build_dir, f))

        if key != "project":  # flashing a single item
            cmd = ""
            if key == "bootloader":  # bootloader needs --flash-mode, etc to be passed in
                cmd = " ".join(flasher_args["write_flash_args"]) + " "

            cmd += flasher_args[key]["offset"] + " "
            cmd += flasher_path(flasher_args[key]["file"])
        else:  # flashing the whole project
            cmd = " ".join(flasher_args["write_flash_args"]) + " "
            flash_items = sorted(((o,f) for (o,f) in flasher_args["flash_files"].items() if len(o) > 0),
                                 key = lambda x: int(x[0], 0))
            for o,f in flash_items:
                cmd += o + " " + flasher_path(f) + " "

        print("%s -p %s -b %s write_flash %s" % (
            os.path.relpath("%s/components/esptool_py/esptool/esptool.py" % os.environ["IDF_PATH"]),
            args.port or "(PORT)",
            args.baud,
            cmd.strip()))
        print("or run 'idf.py -p %s %s'" % (args.port or "(PORT)", key + "-flash" if key != "project" else "flash",))

    if "all" in args.actions or "build" in args.actions:
        print_flashing_message("Project", "project")
    else:
        if "app" in args.actions:
            print_flashing_message("App", "app")
        if "partition_table" in args.actions:
            print_flashing_message("Partition Table", "partition_table")
        if "bootloader" in args.actions:
            print_flashing_message("Bootloader", "bootloader")

ACTIONS = {
    # action name : ( function (or alias), dependencies, order-only dependencies )
    "all" : ( build_target, [], [ "reconfigure", "menuconfig", "clean", "fullclean" ] ),
    "build": ( "all", [], [] ), # build is same as 'all' target
    "clean": ( clean, [], [ "fullclean" ] ),
    "fullclean": ( fullclean, [], [] ),
    "reconfigure": ( reconfigure, [], [] ),
    "menuconfig": ( build_target, [], [] ),
    "size": ( build_target, [], [ "app" ] ),
    "size-components": ( build_target, [], [ "app" ] ),
    "size-files": ( build_target, [], [ "app" ] ),
    "bootloader": ( build_target, [], [] ),
    "bootloader-clean": ( build_target, [], [] ),
    "bootloader-flash": ( flash, [ "bootloader" ], [] ),
    "app": ( build_target, [], [ "clean", "fullclean", "reconfigure" ] ),
    "app-flash": ( flash, [], [ "app" ]),
    "partition_table": ( build_target, [], [ "reconfigure" ] ),
    "partition_table-flash": ( flash, [ "partition_table" ], []),
    "flash": ( flash, [ "all" ], [ ] ),
    "erase_flash": ( erase_flash, [], []),
    "monitor": ( monitor, [], [ "flash", "partition_table-flash", "bootloader-flash", "app-flash" ]),
    "all" : ( build_target, [], [ "reconfigure", "menuconfig", "clean", "fullclean" ] ),
    "build": ( "all", [], [] ), # build is same as 'all' target
    "clean": ( clean, [], [ "fullclean" ] ),
    "fullclean": ( fullclean, [], [] ),
    "reconfigure": ( reconfigure, [], [ "menuconfig" ] ),
    "menuconfig": ( build_target, [], [] ),
    "defconfig": ( build_target, [], [] ),
    "confserver": ( build_target, [], [] ),
    "size": ( build_target, [ "app" ], [] ),
    "size-components": ( build_target, [ "app" ], [] ),
    "size-files": ( build_target, [ "app" ], [] ),
    "bootloader": ( build_target, [], [] ),
    "bootloader-clean": ( build_target, [], [] ),
    "bootloader-flash": ( flash, [ "bootloader" ], [ "erase_flash"] ),
    "app": ( build_target, [], [ "clean", "fullclean", "reconfigure" ] ),
    "app-flash": ( flash, [ "app" ], [ "erase_flash"]),
    "partition_table": ( build_target, [], [ "reconfigure" ] ),
    "partition_table-flash": ( flash, [ "partition_table" ], [ "erase_flash" ]),
    "flash": ( flash, [ "all" ], [ "erase_flash" ] ),
    "erase_flash": ( erase_flash, [], []),
    "monitor": ( monitor, [], [ "flash", "partition_table-flash", "bootloader-flash", "app-flash" ]),
}
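The ACTIONS table maps each action name to a tuple of (function or alias, dependencies, order-only dependencies). The dispatch code itself is not part of this extract, so the following is only a minimal sketch of how such a table could be resolved; the helper name resolve_action and its simplified semantics (run hard dependencies first, follow string aliases, run each action at most once, call signature reduced to the action name) are assumptions made for illustration.

    # Illustrative only, not the dispatch logic from idf.py itself.
    def resolve_action(name, actions, completed=None):
        """Run 'name' from an ACTIONS-style table, running its dependencies first.
        A string entry is treated as an alias (e.g. "build" -> "all")."""
        if completed is None:
            completed = set()
        if name in completed:
            return
        func, deps, order_only = actions[name]
        for dep in deps:              # hard dependencies always run first
            resolve_action(dep, actions, completed)
        if isinstance(func, str):     # alias: delegate to the named action
            resolve_action(func, actions, completed)
        else:
            func(name)                # real callables take more arguments; simplified here
        completed.add(name)

Order-only dependencies (the third tuple element) only constrain ordering when both actions are requested on the same invocation; the sketch above ignores them for brevity.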

def get_commandline_options():
    """ Return all the command line options up to but not including the action """
    result = []
    for a in sys.argv:
        if a in ACTIONS.keys():
            break
        else:
            result.append(a)
    return result

def get_default_serial_port():
    """ Return a default serial port. esptool can do this (smarter), but it can create
    inconsistencies where esptool.py uses one port and idf_monitor uses another.

    Same logic as esptool.py search order, reverse sort by name and choose the first port.
    """
    # Import is done here in order to move it after the check_environment() ensured that pyserial has been installed
    import serial.tools.list_ports

    ports = list(reversed(sorted(
        p.device for p in serial.tools.list_ports.comports() )))
    try:
        print("Choosing default port %s (use '-p PORT' option to set a specific serial port)" % ports[0])
        return ports[0]
    except IndexError:
        raise RuntimeError("No serial ports found. Connect a device, or use '-p PORT' option to set a specific port.")

# Import the actions, arguments extension file
if os.path.exists(os.path.join(os.getcwd(), "idf_ext.py")):
    sys.path.append(os.getcwd())
    try:
        from idf_ext import add_action_extensions, add_argument_extensions
    except ImportError as e:
        print("Error importing extension file idf_ext.py. Skipping.")
        print("Please make sure that it contains implementations (even if they're empty implementations) of")
        print("add_action_extensions and add_argument_extensions.")

def main():
    if sys.version_info[0] != 2 or sys.version_info[1] != 7:
        print("Note: You are using Python %d.%d.%d. Python 3 support is new, please report any problems "
              "you encounter. Search for 'Setting the Python Interpreter' in the ESP-IDF docs if you want to use "
              "Python 2.7." % sys.version_info[:3])

    # Add actions extensions
    try:
        add_action_extensions({
            "build_target": build_target,
            "reconfigure" : reconfigure,
            "flash" : flash,
            "monitor" : monitor,
            "clean" : clean,
            "fullclean" : fullclean
        }, ACTIONS)
    except NameError:
        pass

    parser = argparse.ArgumentParser(description='ESP-IDF build management tool')
    parser.add_argument('-p', '--port', help="Serial port", default=None)
    parser.add_argument('-b', '--baud', help="Baud rate", default=460800)
    parser.add_argument('-p', '--port', help="Serial port",
                        default=os.environ.get('ESPPORT', None))
    parser.add_argument('-b', '--baud', help="Baud rate",
                        default=os.environ.get('ESPBAUD', 460800))
    parser.add_argument('-C', '--project-dir', help="Project directory", default=os.getcwd())
    parser.add_argument('-B', '--build-dir', help="Build directory", default=None)
    parser.add_argument('-G', '--generator', help="Cmake generator", choices=GENERATOR_CMDS.keys())
    parser.add_argument('-n', '--no-warnings', help="Disable Cmake warnings", action="store_true")
    parser.add_argument('-v', '--verbose', help="Verbose build output", action="store_true")
    parser.add_argument('-D', '--define-cache-entry', help="Create a cmake cache entry", nargs='+')
    parser.add_argument('--no-ccache', help="Disable ccache. Otherwise, if ccache is available on the PATH then it will be used for faster builds.", action="store_true")
    parser.add_argument('actions', help="Actions (build targets or other operations)", nargs='+',
                        choices=ACTIONS.keys())

    # Add arguments extensions
    try:
        add_argument_extensions(parser)
    except NameError:
        pass

    args = parser.parse_args()

    check_environment()

@ -374,14 +511,29 @@ def main():

        completed_actions.add(action)

    while len(args.actions) > 0:
        execute_action(args.actions[0], args.actions[1:])
        args.actions.pop(0)
    actions = list(args.actions)
    while len(actions) > 0:
        execute_action(actions[0], actions[1:])
        actions.pop(0)

    print_closing_message(args)

if __name__ == "__main__":
    try:
        main()
        # On MSYS2 we need to run idf.py with "winpty" in order to be able to cancel the subprocesses properly on
        # keyboard interrupt (CTRL+C).
        # Using an own global variable for indicating that we are running with "winpty" seems to be the most suitable
        # option as os.environment['_'] contains "winpty" only when it is run manually from console.
        WINPTY_VAR = 'WINPTY'
        WINPTY_EXE = 'winpty'
        if ('MSYSTEM' in os.environ) and (not os.environ['_'].endswith(WINPTY_EXE) and WINPTY_VAR not in os.environ):
            os.environ[WINPTY_VAR] = '1'   # the value is of no interest to us
            # idf.py calls itself with "winpty" and WINPTY global variable set
            ret = subprocess.call([WINPTY_EXE, sys.executable] + sys.argv, env=os.environ)
            if ret:
                raise SystemExit(ret)
        else:
            main()
    except FatalError as e:
        print(e)
        sys.exit(2)
4  tools/kconfig/.gitignore  vendored

@ -16,7 +16,9 @@ gconf.glade.h

# configuration programs
#
conf
conf-idf
mconf
mconf-idf
nconf
qconf
gconf

@ -24,7 +26,9 @@ kxgettext

# configuration programs, Windows
conf.exe
conf-idf.exe
mconf.exe
mconf-idf.exe
nconf.exe
qconf.exe
gconf.exe
@ -43,7 +43,7 @@ endif

endif # MING32
endif # MSYSTEM

default: mconf conf
default: mconf-idf conf-idf

xconfig: qconf
	$< $(silent) $(Kconfig)

@ -51,41 +51,41 @@ xconfig: qconf

gconfig: gconf
	$< $(silent) $(Kconfig)

menuconfig: mconf
menuconfig: mconf-idf
	$< $(silent) $(Kconfig)

config: conf
config: conf-idf
	$< $(silent) --oldaskconfig $(Kconfig)

nconfig: nconf
	$< $(silent) $(Kconfig)

silentoldconfig: conf
silentoldconfig: conf-idf
	mkdir -p include/config include/generated
	$< $(silent) --$@ $(Kconfig)

localyesconfig localmodconfig: streamline_config.pl conf
localyesconfig localmodconfig: streamline_config.pl conf-idf
	mkdir -p include/config include/generated
	perl $< --$@ . $(Kconfig) > .tmp.config
	if [ -f .config ]; then \
		cmp -s .tmp.config .config || \
		(mv -f .config .config.old.1; \
		mv -f .tmp.config .config; \
		conf $(silent) --silentoldconfig $(Kconfig); \
		conf-idf $(silent) --silentoldconfig $(Kconfig); \
		mv -f .config.old.1 .config.old) \
	else \
		mv -f .tmp.config .config; \
		conf $(silent) --silentoldconfig $(Kconfig); \
		conf-idf $(silent) --silentoldconfig $(Kconfig); \
	fi
	rm -f .tmp.config


# These targets map 1:1 to the commandline options of 'conf'
# These targets map 1:1 to the commandline options of 'conf-idf'
simple-targets := oldconfig allnoconfig allyesconfig allmodconfig \
	alldefconfig randconfig listnewconfig olddefconfig
PHONY += $(simple-targets)

$(simple-targets): conf
$(simple-targets): conf-idf
	$< $(silent) --$@ $(Kconfig)

PHONY += oldnoconfig savedefconfig defconfig

@ -95,10 +95,10 @@ PHONY += oldnoconfig savedefconfig defconfig

# counter-intuitive name.
oldnoconfig: olddefconfig

savedefconfig: conf
savedefconfig: conf-idf
	$< $(silent) --$@=defconfig $(Kconfig)

defconfig: conf
defconfig: conf-idf
ifeq ($(KBUILD_DEFCONFIG),)
	$< $(silent) --defconfig $(Kconfig)
else

@ -111,12 +111,12 @@ else

endif
endif

%_defconfig: conf
%_defconfig: conf-idf
	$< $(silent) --defconfig=arch/$(SRCARCH)/configs/$@ $(Kconfig)

configfiles=$(wildcard $(srctree)/kernel/configs/$@ $(srctree)/arch/$(SRCARCH)/configs/$@)

%.config: conf
%.config: conf-idf
	$(if $(call configfiles),, $(error No configuration exists for this target on this architecture))
	$(CONFIG_SHELL) $(srctree)/scripts/kconfig/merge_config.sh -m .config $(configfiles)
	+yes "" | $(MAKE) -f $(srctree)/Makefile oldconfig

@ -165,7 +165,7 @@ check-lxdialog := $(SRCDIR)/lxdialog/check-lxdialog.sh

# Use recursively expanded variables so we do not call gcc unless
# we really need to do so. (Do not call gcc as part of make mrproper)
CFLAGS += $(shell $(CONFIG_SHELL) $(check-lxdialog) -ccflags) \
	-DLOCALE -MD
	-DLOCALE -MMD

%.o: $(SRCDIR)/%.c
	$(CC) -c $(CFLAGS) $(CPPFLAGS) $< -o $@

@ -178,10 +178,10 @@ lxdialog/%.o: $(SRCDIR)/lxdialog/%.c

# ===========================================================================
# Shared Makefile for the various kconfig executables:
# conf:      Used for defconfig, oldconfig and related targets
# conf-idf:  Used for defconfig, oldconfig and related targets
# nconf:     Used for the nconfig target.
#            Utilizes ncurses
# mconf:     Used for the menuconfig target
# mconf-idf: Used for the menuconfig target
#            Utilizes the lxdialog package
# qconf:     Used for the xconfig target
#            Based on Qt which needs to be installed to compile it

@ -200,14 +200,15 @@ qconf-cxxobjs := qconf.o

qconf-objs := zconf.tab.o
gconf-objs := gconf.o zconf.tab.o

hostprogs-y := conf nconf mconf kxgettext qconf gconf
hostprogs-y := conf-idf nconf mconf-idf kxgettext qconf gconf

all-objs := $(conf-objs) $(mconf-objs) $(lxdialog)
all-deps := $(all-objs:.o=.d)

clean-files := qconf.moc .tmp_qtcheck .tmp_gtkcheck
clean-files += zconf.tab.c zconf.lex.c zconf.hash.c gconf.glade.h
clean-files += $(all-objs) $(all-deps) conf mconf
clean-files += $(all-objs) $(all-deps) conf-idf mconf-idf conf mconf
# (note: cleans both mconf & conf (old names) and conf-idf & mconf-idf (new names))

# Check that we have the required ncurses stuff installed for lxdialog (menuconfig)
PHONY += dochecklxdialog

@ -324,16 +325,16 @@ gconf.glade.h: gconf.glade

	gconf.glade


mconf: lxdialog $(mconf-objs)
mconf-idf: lxdialog $(mconf-objs)
	$(CC) -o $@ $(mconf-objs) $(LOADLIBES_mconf)

conf: $(conf-objs)
conf-idf: $(conf-objs)
	$(CC) -o $@ $(conf-objs) $(LOADLIBES_conf)

zconf.tab.c: zconf.lex.c

zconf.lex.c: $(SRCDIR)/zconf.l
	flex -L -P zconf -o zconf.lex.c $<
	flex -L -Pzconf -ozconf.lex.c $<

zconf.hash.c: $(SRCDIR)/zconf.gperf
	# strip CRs on Windows systems where gperf will otherwise barf on them
@ -980,11 +980,17 @@ static int handle_exit(void)

		}
		/* fall through */
	case -1:
		if (!silent)
		if (!silent) {
			const char *is_cmake = getenv("IDF_CMAKE");
			const char *build_msg;
			if (is_cmake && is_cmake[0] == 'y')
				build_msg = _("Ready to use CMake (or 'idf.py build') to build the project.");
			else
				build_msg = _("Execute 'make' to start the build or try 'make help'.");
			printf(_("\n\n"
				"*** End of the configuration.\n"
				"*** Execute 'make' to start the build or try 'make help'."
				"\n\n"));
				"*** End of the configuration.\n"
				"*** %s\n\n"), build_msg);
		}
		res = 0;
		break;
	default:
@ -20,6 +20,7 @@

# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import argparse
import sys
import os

@ -29,9 +30,13 @@ import json

import gen_kconfig_doc
import kconfiglib
import pprint

__version__ = "0.1"

if not "IDF_CMAKE" in os.environ:
    os.environ["IDF_CMAKE"] = ""

def main():
    parser = argparse.ArgumentParser(description='confgen.py v%s - Config Generation Tool' % __version__, prog=os.path.basename(sys.argv[0]))

@ -45,10 +50,6 @@ def main():

                        nargs='?',
                        default=None)

    parser.add_argument('--create-config-if-missing',
                        help='If set, a new config file will be saved if the old one is not found',
                        action='store_true')

    parser.add_argument('--kconfig',
                        help='KConfig file with config item definitions',
                        required=True)

@ -65,7 +66,7 @@ def main():

    for fmt, filename in args.output:
        if not fmt in OUTPUT_FORMATS.keys():
            print("Format '%s' not recognised. Known formats: %s" % (fmt, OUTPUT_FORMATS))
            print("Format '%s' not recognised. Known formats: %s" % (fmt, OUTPUT_FORMATS.keys()))
            sys.exit(1)

    try:

@ -78,6 +79,8 @@ def main():

        os.environ[name] = value

    config = kconfiglib.Kconfig(args.kconfig)
    config.disable_redun_warnings()
    config.disable_override_warnings()

    if args.defaults is not None:
        # always load defaults first, so any items which are not defined in that config

@ -86,26 +89,22 @@ def main():

            raise RuntimeError("Defaults file not found: %s" % args.defaults)
        config.load_config(args.defaults)

    if args.config is not None:
        if os.path.exists(args.config):
            config.load_config(args.config)
        elif args.create_config_if_missing:
            print("Creating config file %s..." % args.config)
            config.write_config(args.config)
        elif args.default is None:
            raise RuntimeError("Config file not found: %s" % args.config)
    # If config file previously exists, load it
    if args.config and os.path.exists(args.config):
        config.load_config(args.config, replace=False)

    for output_type, filename in args.output:
        temp_file = tempfile.mktemp(prefix="confgen_tmp")
    # Output the files specified in the arguments
    for output_type, filename in args.output:
        temp_file = tempfile.mktemp(prefix="confgen_tmp")
        try:
            output_function = OUTPUT_FORMATS[output_type]
            output_function(config, temp_file)
            update_if_changed(temp_file, filename)
        finally:
            try:
                output_function = OUTPUT_FORMATS[output_type]
                output_function(config, temp_file)
                update_if_changed(temp_file, filename)
            finally:
                try:
                    os.remove(temp_file)
                except OSError:
                    pass
                os.remove(temp_file)
            except OSError:
                pass


def write_config(config, filename):

@ -150,9 +149,8 @@ def write_cmake(config, filename):

                prefix, sym.name, val))
    config.walk_menu(write_node)

def write_json(config, filename):
def get_json_values(config):
    config_dict = {}

    def write_node(node):
        sym = node.item
        if not isinstance(sym, kconfiglib.Symbol):

@ -168,12 +166,98 @@ def write_json(config, filename):

            val = int(val)
        config_dict[sym.name] = val
    config.walk_menu(write_node)
    return config_dict

def write_json(config, filename):
    config_dict = get_json_values(config)
    with open(filename, "w") as f:
        json.dump(config_dict, f, indent=4, sort_keys=True)
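The generated file is plain JSON mapping config names to native values (roughly: true/false for bool and tristate symbols, numbers for int/hex symbols, strings otherwise, per write_node above), so build scripts can consume it directly. A small illustrative reader; the path below is an assumption, the real location is wherever "--output json" points:

    import json

    # Hypothetical output path used only for this example.
    with open("build/config/sdkconfig.json") as f:
        cfg = json.load(f)

    if cfg.get("TEST_BOOL"):
        print("TEST_BOOL enabled, range symbol value:", cfg.get("TEST_CONDITIONAL_RANGES"))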

def write_json_menus(config, filename):
    result = []       # root level items
    node_lookup = {}  # lookup from MenuNode to an item in result

    def write_node(node):
        try:
            json_parent = node_lookup[node.parent]["children"]
        except KeyError:
            assert not node.parent in node_lookup  # if fails, we have a parent node with no "children" entity (ie a bug)
            json_parent = result  # root level node

        # node.kconfig.y means node has no dependency,
        if node.dep is node.kconfig.y:
            depends = None
        else:
            depends = kconfiglib.expr_str(node.dep)

        try:
            is_menuconfig = node.is_menuconfig
        except AttributeError:
            is_menuconfig = False

        new_json = None
        if node.item == kconfiglib.MENU or is_menuconfig:
            new_json = { "type" : "menu",
                         "title" : node.prompt[0],
                         "depends_on": depends,
                         "children": []
                       }
            if is_menuconfig:
                sym = node.item
                new_json["name"] = sym.name
                new_json["help"] = node.help
                new_json["is_menuconfig"] = is_menuconfig
                greatest_range = None
                if len(sym.ranges) > 0:
                    # Note: Evaluating the condition using kconfiglib's expr_value
                    # should have one condition which is true
                    for min_range, max_range, cond_expr in sym.ranges:
                        if kconfiglib.expr_value(cond_expr):
                            greatest_range = [min_range, max_range]
                new_json["range"] = greatest_range

        elif isinstance(node.item, kconfiglib.Symbol):
            sym = node.item
            greatest_range = None
            if len(sym.ranges) > 0:
                # Note: Evaluating the condition using kconfiglib's expr_value
                # should have one condition which is true
                for min_range, max_range, cond_expr in sym.ranges:
                    if kconfiglib.expr_value(cond_expr):
                        greatest_range = [int(min_range.str_value), int(max_range.str_value)]

            new_json = {
                "type" : kconfiglib.TYPE_TO_STR[sym.type],
                "name" : sym.name,
                "title": node.prompt[0] if node.prompt else None,
                "depends_on" : depends,
                "help": node.help,
                "range" : greatest_range,
                "children": [],
            }
        elif isinstance(node.item, kconfiglib.Choice):
            choice = node.item
            new_json = {
                "type": "choice",
                "title": node.prompt[0],
                "name": choice.name,
                "depends_on" : depends,
                "help": node.help,
                "children": []
            }

        if new_json:
            json_parent.append(new_json)
            node_lookup[node] = new_json

    config.walk_menu(write_node)
    with open(filename, "w") as f:
        f.write(json.dumps(result, sort_keys=True, indent=4))
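As an illustration of the structure this produces, an int symbol such as TEST_CONDITIONAL_RANGES from the test Kconfig added later in this commit would serialize roughly like the literal below. The field names come from the code above; the concrete values are invented for the example.

    example_node = {
        "type": "int",
        "name": "TEST_CONDITIONAL_RANGES",
        "title": "Something with a range",
        "depends_on": None,
        "help": None,
        "range": [0, 10],   # the range active while TEST_BOOL is unset
        "children": [],
    }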

def update_if_changed(source, destination):
    with open(source, "r") as f:
        source_contents = f.read()

    if os.path.exists(destination):
        with open(destination, "r") as f:
            dest_contents = f.read()

@ -190,6 +274,7 @@ OUTPUT_FORMATS = {

    "cmake" : write_cmake,
    "docs" : gen_kconfig_doc.write_docs,
    "json" : write_json,
    "json_menus" : write_json_menus,
}

class FatalError(RuntimeError):
185  tools/kconfig_new/confserver.py  Executable file

@ -0,0 +1,185 @@

#!/usr/bin/env python
#
# Long-running server process uses stdin & stdout to communicate JSON
# with a caller
#
from __future__ import print_function
import argparse
import json
import kconfiglib
import os
import sys
import confgen
from confgen import FatalError, __version__

def main():
    parser = argparse.ArgumentParser(description='confserver.py v%s - Config Generation Tool' % __version__, prog=os.path.basename(sys.argv[0]))

    parser.add_argument('--config',
                        help='Project configuration settings',
                        required=True)

    parser.add_argument('--kconfig',
                        help='KConfig file with config item definitions',
                        required=True)

    parser.add_argument('--env', action='append', default=[],
                        help='Environment to set when evaluating the config file', metavar='NAME=VAL')

    args = parser.parse_args()

    try:
        args.env = [ (name,value) for (name,value) in ( e.split("=",1) for e in args.env) ]
    except ValueError:
        print("--env arguments must each contain =. To unset an environment variable, use 'ENV='")
        sys.exit(1)

    for name, value in args.env:
        os.environ[name] = value

    print("Server running, waiting for requests on stdin...", file=sys.stderr)
    run_server(args.kconfig, args.config)


def run_server(kconfig, sdkconfig):
    config = kconfiglib.Kconfig(kconfig)
    config.load_config(sdkconfig)

    config_dict = confgen.get_json_values(config)
    ranges_dict = get_ranges(config)
    json.dump({"version": 1, "values" : config_dict, "ranges" : ranges_dict}, sys.stdout)
    print("\n")

    while True:
        line = sys.stdin.readline()
        if not line:
            break
        req = json.loads(line)
        before = confgen.get_json_values(config)
        before_ranges = get_ranges(config)

        if "load" in req:  # if we're loading a different sdkconfig, response should have all items in it
            before = {}
            before_ranges = {}

            # if no new filename is supplied, use existing sdkconfig path, otherwise update the path
            if req["load"] is None:
                req["load"] = sdkconfig
            else:
                sdkconfig = req["load"]

        if "save" in req:
            if req["save"] is None:
                req["save"] = sdkconfig
            else:
                sdkconfig = req["save"]

        error = handle_request(config, req)

        after = confgen.get_json_values(config)
        after_ranges = get_ranges(config)

        values_diff = diff(before, after)
        ranges_diff = diff(before_ranges, after_ranges)
        response = {"version" : 1, "values" : values_diff, "ranges" : ranges_diff}
        if error:
            for e in error:
                print("Error: %s" % e, file=sys.stderr)
            response["error"] = error
        json.dump(response, sys.stdout)
        print("\n")


def handle_request(config, req):
    if not "version" in req:
        return [ "All requests must have a 'version'" ]
    if int(req["version"]) != 1:
        return [ "Only version 1 requests supported" ]

    error = []

    if "load" in req:
        print("Loading config from %s..." % req["load"], file=sys.stderr)
        try:
            config.load_config(req["load"])
        except Exception as e:
            error += [ "Failed to load from %s: %s" % (req["load"], e) ]

    if "set" in req:
        handle_set(config, error, req["set"])

    if "save" in req:
        try:
            print("Saving config to %s..." % req["save"], file=sys.stderr)
            confgen.write_config(config, req["save"])
        except Exception as e:
            error += [ "Failed to save to %s: %s" % (req["save"], e) ]

    return error

def handle_set(config, error, to_set):
    missing = [ k for k in to_set if not k in config.syms ]
    if missing:
        error.append("The following config symbol(s) were not found: %s" % (", ".join(missing)))
    # replace name keys with the full config symbol for each key:
    to_set = dict((config.syms[k],v) for (k,v) in to_set.items() if not k in missing)

    # Work through the list of values to set, noting that
    # some may not be immediately applicable (maybe they depend
    # on another value which is being set). Therefore, defer
    # knowing if any value is unsettable until then end

    while len(to_set):
        set_pass = [ (k,v) for (k,v) in to_set.items() if k.visibility ]
        if not set_pass:
            break  # no visible keys left
        for (sym,val) in set_pass:
            if sym.type in (kconfiglib.BOOL, kconfiglib.TRISTATE):
                if val == True:
                    sym.set_value(2)
                elif val == False:
                    sym.set_value(0)
                else:
                    error.append("Boolean symbol %s only accepts true/false values" % sym.name)
            else:
                sym.set_value(str(val))
            print("Set %s" % sym.name)
            del to_set[sym]

    if len(to_set):
        error.append("The following config symbol(s) were not visible so were not updated: %s" % (", ".join(s.name for s in to_set)))



def diff(before, after):
    """
    Return a dictionary with the difference between 'before' and 'after' (either with the new value if changed,
    or None as the value if a key in 'before' is missing in 'after'
    """
    diff = dict((k,v) for (k,v) in after.items() if before.get(k, None) != v)
    hidden = dict((k,None) for k in before if k not in after)
    diff.update(hidden)
    return diff
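A tiny illustration of the semantics described in the docstring, using made-up values: keys that changed carry their new value, and keys that disappeared from the visible set map to None.

    before = {"TEST_BOOL": False, "TEST_CHILD_STR": "OHAI!"}
    after = {"TEST_BOOL": True}

    assert diff(before, after) == {"TEST_BOOL": True, "TEST_CHILD_STR": None}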


def get_ranges(config):
    ranges_dict = {}
    def handle_node(node):
        sym = node.item
        if not isinstance(sym, kconfiglib.Symbol):
            return
        active_range = sym.active_range
        if active_range[0] is not None:
            ranges_dict[sym.name] = active_range

    config.walk_menu(handle_node)
    return ranges_dict


if __name__ == '__main__':
    try:
        main()
    except FatalError as e:
        print("A fatal error occurred: %s" % e, file=sys.stderr)
        sys.exit(2)
@ -20,7 +20,9 @@

# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import os
import re
import kconfiglib

# Indentation to be used in the generated file

@ -40,6 +42,11 @@ def write_docs(config, filename):

    with open(filename, "w") as f:
        config.walk_menu(lambda node: write_menu_item(f, node))

def node_is_menu(node):
    try:
        return node.item == kconfiglib.MENU or node.is_menuconfig
    except AttributeError:
        return False  # not all MenuNodes have is_menuconfig for some reason

def get_breadcrumbs(node):
    # this is a bit wasteful as it recalculates each time, but still...

@ -47,12 +54,26 @@ def get_breadcrumbs(node):

    node = node.parent
    while node.parent:
        if node.prompt:
            result = [ node.prompt[0] ] + result
            result = [ ":ref:`%s`" % get_link_anchor(node) ] + result
        node = node.parent
    return " > ".join(result)

def get_link_anchor(node):
    try:
        return "CONFIG_%s" % node.item.name
    except AttributeError:
        assert(node_is_menu(node))  # only menus should have no item.name

    # for menus, build a link anchor out of the parents
    result = []
    while node.parent:
        if node.prompt:
            result = [ re.sub(r"[^a-zA-z0-9]+", "-", node.prompt[0]) ] + result
        node = node.parent
    result = "-".join(result).lower()
    return result
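For example (the prompts here are invented for illustration), a menu prompted "Test config" sitting under a top-level "Component config" menu gets the anchor below; this simply replays the re.sub, join and lower steps from the function above:

    import re

    parts = ["Component config", "Test config"]  # prompts from root to the menu node
    anchor = "-".join(re.sub(r"[^a-zA-z0-9]+", "-", p) for p in parts).lower()
    assert anchor == "component-config-test-config"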

def get_heading_level(node):
    # bit wasteful also
    result = INITIAL_HEADING_LEVEL
    node = node.parent
    while node.parent:

@ -71,42 +92,41 @@ def format_rest_text(text, indent):

        text += '\n'
    return text

def write_menu_item(f, node):
def node_should_write(node):
    if not node.prompt:
        return  # Don't do anything for invisible menu items
        return False  # Don't do anything for invisible menu items

    if isinstance(node.parent.item, kconfiglib.Choice):
        return  # Skip choice nodes, they are handled as part of the parent (see below)
        return False  # Skip choice nodes, they are handled as part of the parent (see below)

    return True

def write_menu_item(f, node):
    if not node_should_write(node):
        return

    try:
        name = node.item.name
    except AttributeError:
        name = None

    try:
        is_menu = node.item == kconfiglib.MENU or node.is_menuconfig
    except AttributeError:
        is_menu = False  # not all MenuNodes have is_menuconfig for some reason
    is_menu = node_is_menu(node)

    ## Heading
    if name:
        title = name
        # add link target so we can use :ref:`CONFIG_FOO`
        f.write('.. _CONFIG_%s:\n\n' % name)
        title = 'CONFIG_%s' % name
    else:
        # if no symbol name, use the prompt as the heading
        title = node.prompt[0]

    # if no symbol name, use the prompt as the heading
    if True or is_menu:
        f.write('%s\n' % title)
        f.write(HEADING_SYMBOLS[get_heading_level(node)] * len(title))
        f.write('\n\n')
    else:
        f.write('**%s**\n\n\n' % title)
    f.write(".. _%s:\n\n" % get_link_anchor(node))
    f.write('%s\n' % title)
    f.write(HEADING_SYMBOLS[get_heading_level(node)] * len(title))
    f.write('\n\n')

    if name:
        f.write('%s%s\n\n' % (INDENT, node.prompt[0]))
        f.write('%s:emphasis:`Found in: %s`\n\n' % (INDENT, get_breadcrumbs(node)))
        f.write('%s:emphasis:`Found in:` %s\n\n' % (INDENT, get_breadcrumbs(node)))

    try:
        if node.help:

@ -131,6 +151,21 @@ def write_menu_item(f, node):

    f.write('\n\n')

    if is_menu:
        # enumerate links to child items
        first = True
        child = node.list
        while child:
            try:
                if node_should_write(child):
                    if first:
                        f.write("Contains:\n\n")
                        first = False
                    f.write('- :ref:`%s`\n' % get_link_anchor(child))
            except AttributeError:
                pass
            child = child.next
        f.write('\n')

if __name__ == '__main__':
    print("Run this via 'confgen.py --output doc FILENAME'")
@ -500,6 +500,8 @@ class Kconfig(object):

    __slots__ = (
        "_choices",
        "_print_undef_assign",
        "_print_override",
        "_print_redun_assign",
        "_print_warnings",
        "_set_re_match",
        "_unset_re_match",

@ -575,6 +577,7 @@ class Kconfig(object):

        self._print_warnings = warn
        self._print_undef_assign = False
        self._print_redun_assign = self._print_override = True

        self.syms = {}
        self.const_syms = {}

@ -754,6 +757,9 @@ class Kconfig(object):

                    continue

                if sym.orig_type in (BOOL, TRISTATE):
                    if val == "":
                        val = "n"  # C implementation allows 'blank' for 'no'

                    # The C implementation only checks the first character
                    # to the right of '=', for whatever reason
                    if not ((sym.orig_type == BOOL and

@ -823,10 +829,12 @@ class Kconfig(object):

                        display_val = val
                        display_user_val = sym.user_value

                    self._warn('{} set more than once. Old value: "{}", new '
                               'value: "{}".'
                               .format(name, display_user_val, display_val),
                               filename, linenr)
                    msg = '{} set more than once. Old value: "{}", new value: "{}".'.format(name, display_user_val, display_val)

                    if display_user_val == display_val:
                        self._warn_redun_assign(msg, filename, linenr)
                    else:
                        self._warn_override(msg, filename, linenr)

                sym.set_value(val)

@ -924,7 +932,7 @@ class Kconfig(object):

        def write_node(node):
            item = node.item
            if isinstance(item, Symbol):
            if isinstance(item, Symbol) and item.env_var is None:
                config_string = item.config_string
                if config_string:
                    write(config_string)

@ -1054,6 +1062,36 @@ class Kconfig(object):

        """
        self._print_undef_assign = False

    def enable_redun_warnings(self):
        """
        Enables warnings for redundant assignments to symbols. Printed to
        stderr. Enabled by default.
        """
        self._print_redun_assign = True

    def disable_redun_warnings(self):
        """
        See enable_redun_warnings().
        """
        self._print_redun_assign = False

    def enable_override_warnings(self):
        """
        Enables warnings for duplicated assignments in .config files that set
        different values (e.g. CONFIG_FOO=m followed by CONFIG_FOO=y, where
        the last value set is used).

        These warnings are enabled by default. Disabling them might be helpful
        in certain cases when merging configurations.
        """
        self._print_override = True

    def disable_override_warnings(self):
        """
        See enable_override_warnings().
        """
        self._print_override = False
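These toggles are exactly what confgen.py (earlier in this commit) calls when it layers a project config on top of a defaults file; a condensed sketch of that call pattern, with placeholder file names:

    import kconfiglib

    config = kconfiglib.Kconfig("Kconfig")
    config.disable_redun_warnings()      # silence "set more than once" with an identical value
    config.disable_override_warnings()   # silence "set more than once" with a different value
    config.load_config("sdkconfig.defaults")
    config.load_config("sdkconfig", replace=False)   # layer the real config on top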

    def __repr__(self):
        """
        Returns a string with information about the Kconfig object when it is

@ -1068,6 +1106,8 @@ class Kconfig(object):

            "warnings " + ("enabled" if self._print_warnings else "disabled"),
            "undef. symbol assignment warnings " +
            ("enabled" if self._print_undef_assign else "disabled"),
            "redundant symbol assignment warnings " +
            ("enabled" if self._print_redun_assign else "disabled")
        )))

#

@ -2147,6 +2187,19 @@ class Kconfig(object):

            'attempt to assign the value "{}" to the undefined symbol {}' \
            .format(val, name), filename, linenr)

    def _warn_redun_assign(self, msg, filename=None, linenr=None):
        """
        See the class documentation.
        """
        if self._print_redun_assign:
            _stderr_msg("warning: " + msg, filename, linenr)

    def _warn_override(self, msg, filename=None, linenr=None):
        """
        See the class documentation.
        """
        if self._print_override:
            _stderr_msg("warning: " + msg, filename, linenr)

class Symbol(object):
    """

@ -2419,24 +2472,11 @@ class Symbol(object):

            base = _TYPE_TO_BASE[self.orig_type]

            # Check if a range is in effect
            for low_expr, high_expr, cond in self.ranges:
                if expr_value(cond):
                    has_active_range = True

                    # The zeros are from the C implementation running strtoll()
                    # on empty strings
                    low = int(low_expr.str_value, base) if \
                        _is_base_n(low_expr.str_value, base) else 0
                    high = int(high_expr.str_value, base) if \
                        _is_base_n(high_expr.str_value, base) else 0

                    break
            else:
                has_active_range = False
            low, high = self.active_range

            if vis and self.user_value is not None and \
               _is_base_n(self.user_value, base) and \
               (not has_active_range or
               (low is None or
               low <= int(self.user_value, base) <= high):

                # If the user value is well-formed and satisfies range

@ -2463,7 +2503,7 @@ class Symbol(object):

                    val_num = 0  # strtoll() on empty string

                # This clamping procedure runs even if there's no default
                if has_active_range:
                if low is not None:
                    clamp = None
                    if val_num < low:
                        clamp = low

@ -2714,6 +2754,28 @@ class Symbol(object):

        if self._is_user_assignable():
            self._rec_invalidate()

    @property
    def active_range(self):
        """
        Returns a tuple of (low, high) integer values if a range
        limit is active for this symbol, or (None, None) if no range
        limit exists.
        """
        base = _TYPE_TO_BASE[self.orig_type]

        for low_expr, high_expr, cond in self.ranges:
            if expr_value(cond):
                # The zeros are from the C implementation running strtoll()
                # on empty strings
                low = int(low_expr.str_value, base) if \
                    _is_base_n(low_expr.str_value, base) else 0
                high = int(high_expr.str_value, base) if \
                    _is_base_n(high_expr.str_value, base) else 0

                return (low, high)
        return (None, None)
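Illustrative use of the new property, assuming config is a kconfiglib.Kconfig instance built from the test Kconfig added later in this commit:

    sym = config.syms["TEST_CONDITIONAL_RANGES"]
    low, high = sym.active_range   # (0, 10) while TEST_BOOL is n, (0, 100) once TEST_BOOL is set to y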


    def __repr__(self):
        """
        Returns a string with information about the symbol (including its name,

44  tools/kconfig_new/test/Kconfig  Normal file

@ -0,0 +1,44 @@

menu "Test config"

config TEST_BOOL
    bool "Test boolean"
    default n

config TEST_CHILD_BOOL
    bool "Test boolean"
    depends on TEST_BOOL
    default y

config TEST_CHILD_STR
    string "Test str"
    depends on TEST_BOOL
    default "OHAI!"

choice TEST_CHOICE
    prompt "Some choice"
    default CHOICE_A

config CHOICE_A
    bool "A"

config CHOICE_B
    bool "B"

endchoice

config DEPENDS_ON_CHOICE
    string "Depends on choice"
    default "Depends on A" if CHOICE_A
    default "Depends on B" if CHOICE_B
    default "WAT"

config SOME_UNRELATED_THING
    bool "Some unrelated thing"

config TEST_CONDITIONAL_RANGES
    int "Something with a range"
    range 0 100 if TEST_BOOL
    range 0 10
    default 1

endmenu

1  tools/kconfig_new/test/sdkconfig  Normal file

@ -0,0 +1 @@

CONFIG_SOME_UNRELATED_THING=y
116  tools/kconfig_new/test/test_confserver.py  Executable file

@ -0,0 +1,116 @@

#!/usr/bin/env python
from __future__ import print_function
import os
import sys
import threading
import time
import json
import argparse
import shutil
import tempfile

import pexpect

sys.path.append("..")
import confserver

def create_server_thread(*args):
    t = threading.Thread()

def parse_testcases():
    with open("testcases.txt", "r") as f:
        cases = [ l for l in f.readlines() if len(l.strip()) > 0 ]
    # Each 3 lines in the file should be formatted as:
    # * Description of the test change
    # * JSON "changes" to send to the server
    # * Result JSON to expect back from the server
    if len(cases) % 3 != 0:
        print("Warning: testcases.txt has wrong number of non-empty lines (%d). Should be 3 lines per test case, always." % len(cases))

    for i in range(0, len(cases), 3):
        desc = cases[i]
        send = cases[i+1]
        expect = cases[i+2]
        if not desc.startswith("* "):
            raise RuntimeError("Unexpected description at line %d: '%s'" % (i+1, desc))
        if not send.startswith("> "):
            raise RuntimeError("Unexpected send at line %d: '%s'" % (i+2, send))
        if not expect.startswith("< "):
            raise RuntimeError("Unexpected expect at line %d: '%s'" % (i+3, expect))
        desc = desc[2:]
        send = json.loads(send[2:])
        expect = json.loads(expect[2:])
        yield (desc, send, expect)

def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('--logfile', type=argparse.FileType('w'), help='Optional session log of the interactions with confserver.py')
    args = parser.parse_args()

    try:
        # set up temporary file to use as sdkconfig copy
        with tempfile.NamedTemporaryFile(mode="w", delete=False) as temp_sdkconfig:
            temp_sdkconfig_path = os.path.join(tempfile.gettempdir(), temp_sdkconfig.name)
            with open("sdkconfig") as orig:
                temp_sdkconfig.write(orig.read())

        cmdline = "../confserver.py --kconfig Kconfig --config %s" % temp_sdkconfig_path
        print("Running: %s" % cmdline)
        p = pexpect.spawn(cmdline, timeout=0.5)
        p.logfile = args.logfile
        p.setecho(False)

        def expect_json():
            # run p.expect() to expect a json object back, and return it as parsed JSON
            p.expect("{.+}\r\n")
            return json.loads(p.match.group(0).strip().decode())

        p.expect("Server running.+\r\n")
        initial = expect_json()
        print("Initial: %s" % initial)
        cases = parse_testcases()

        for (desc, send, expected) in cases:
            print(desc)
            req = { "version" : "1", "set" : send }
            req = json.dumps(req)
            print("Sending: %s" % (req))
            p.send("%s\n" % req)
            readback = expect_json()
            print("Read back: %s" % (json.dumps(readback)))
            if readback.get("version", None) != 1:
                raise RuntimeError('Expected {"version" : 1} in response')
            for expect_key in expected.keys():
                read_vals = readback[expect_key]
                exp_vals = expected[expect_key]
                if read_vals != exp_vals:
                    expect_diff = dict((k,v) for (k,v) in exp_vals.items() if not k in read_vals or v != read_vals[k])
                    raise RuntimeError("Test failed! Was expecting %s: %s" % (expect_key, json.dumps(expect_diff)))
            print("OK")

        print("Testing load/save...")
        before = os.stat(temp_sdkconfig_path).st_mtime
        p.send("%s\n" % json.dumps({ "version" : "1", "save" : temp_sdkconfig_path }))
        save_result = expect_json()
        print("Save result: %s" % (json.dumps(save_result)))
        assert len(save_result["values"]) == 0
        assert len(save_result["ranges"]) == 0
        after = os.stat(temp_sdkconfig_path).st_mtime
        assert after > before

        p.send("%s\n" % json.dumps({ "version" : "1", "load" : temp_sdkconfig_path }))
        load_result = expect_json()
        print("Load result: %s" % (json.dumps(load_result)))
        assert len(load_result["values"]) > 0  # loading same file should return all config items
        assert len(load_result["ranges"]) > 0
        print("Done. All passed.")

    finally:
        try:
            os.remove(temp_sdkconfig_path)
        except OSError:
            pass

if __name__ == "__main__":
    main()
31  tools/kconfig_new/test/testcases.txt  Normal file

@ -0,0 +1,31 @@

* Set TEST_BOOL, showing child items
> { "TEST_BOOL" : true }
< { "values" : { "TEST_BOOL" : true, "TEST_CHILD_STR" : "OHAI!", "TEST_CHILD_BOOL" : true }, "ranges": {"TEST_CONDITIONAL_RANGES": [0, 100]} }

* Set TEST_CHILD_STR
> { "TEST_CHILD_STR" : "Other value" }
< { "values" : { "TEST_CHILD_STR" : "Other value" } }

* Clear TEST_BOOL, hiding child items
> { "TEST_BOOL" : false }
< { "values" : { "TEST_BOOL" : false, "TEST_CHILD_STR" : null, "TEST_CHILD_BOOL" : null }, "ranges": {"TEST_CONDITIONAL_RANGES": [0, 10]} }

* Set TEST_CHILD_BOOL, invalid as parent is disabled
> { "TEST_CHILD_BOOL" : false }
< { "values" : { } }

* Set TEST_BOOL & TEST_CHILD_STR together
> { "TEST_BOOL" : true, "TEST_CHILD_STR" : "New value" }
< { "values" : { "TEST_BOOL" : true, "TEST_CHILD_STR" : "New value", "TEST_CHILD_BOOL" : true } }

* Set choice
> { "CHOICE_B" : true }
< { "values" : { "CHOICE_B" : true, "CHOICE_A" : false, "DEPENDS_ON_CHOICE" : "Depends on B" } }

* Set string which depends on choice B
> { "DEPENDS_ON_CHOICE" : "oh, really?" }
< { "values" : { "DEPENDS_ON_CHOICE" : "oh, really?" } }

* Try setting boolean values to invalid types
> { "CHOICE_A" : 11, "TEST_BOOL" : "false" }
< { "values" : { } }

@ -1,6 +1,6 @@

SUPPORTED_TOOLCHAIN_COMMIT_DESC = crosstool-ng-1.22.0-80-g6c4433a
SUPPORTED_TOOLCHAIN_COMMIT_DESC = crosstool-ng-1.22.0-92-g8facf4c
SUPPORTED_TOOLCHAIN_GCC_VERSIONS = 5.2.0

CURRENT_TOOLCHAIN_COMMIT_DESC = crosstool-ng-1.22.0-80-g6c4433a
CURRENT_TOOLCHAIN_COMMIT_DESC_SHORT = 1.22.0-80-g6c4433a
CURRENT_TOOLCHAIN_COMMIT_DESC = crosstool-ng-1.22.0-92-g8facf4c
CURRENT_TOOLCHAIN_COMMIT_DESC_SHORT = 1.22.0-92-g8facf4c
CURRENT_TOOLCHAIN_GCC_VERSION = 5.2.0

@ -41,7 +41,7 @@ Name: python32; Description: Download and Run Python 2.7.14 Installer and instal

Name: python64; Description: Download and Run Python 2.7.14 Installer and install pyserial; GroupDescription: "Other Required Tools:"; Check: IsWin64 and not Python27Installed

[Files]
Components: toolchain; Source: "input\xtensa-esp32-elf\*"; DestDir: "{app}\toolchain\"; Flags: recursesubdirs;
Components: toolchain; Source: "input\xtensa-esp8266-elf\*"; DestDir: "{app}\toolchain\"; Flags: recursesubdirs;
Components: mconf; Source: "input\mconf-v4.6.0.0-idf-20180319-win32\*"; DestDir: "{app}\mconf\";
Components: ninja; Source: "input\ninja.exe"; DestDir: "{app}";