From 688953795134fd7a143d1778113a876040a93887 Mon Sep 17 00:00:00 2001 From: dongheng Date: Mon, 18 Mar 2019 13:54:57 +0800 Subject: [PATCH 1/2] tools(unit-test_app): Update from esp-idf Commit ID: 13018449 --- tools/unit-test-app/CMakeLists.txt | 6 + tools/unit-test-app/Makefile | 68 +- tools/unit-test-app/README.md | 46 +- .../components/unity/CMakeLists.txt | 10 + tools/unit-test-app/components/unity/Kconfig | 15 + .../components/unity/component.mk | 4 + .../unity/include/idf_performance.h | 32 + .../components/unity/include/test_utils.h | 109 +++ .../components/unity/include/unity.h | 3 + .../components/unity/include/unity_config.h | 14 +- .../components/unity/ref_clock.c | 170 ++++ .../components/unity/test_utils.c | 86 ++ .../components/unity/unity_platform.c | 127 ++- tools/unit-test-app/configs/aes_no_hw | 3 + tools/unit-test-app/configs/app_update | 13 + tools/unit-test-app/configs/bt | 4 + tools/unit-test-app/configs/default | 1 + tools/unit-test-app/configs/libsodium | 3 + tools/unit-test-app/configs/psram | 3 + tools/unit-test-app/configs/psram_2 | 3 + tools/unit-test-app/configs/psram_8m | 4 + tools/unit-test-app/configs/release | 3 + tools/unit-test-app/configs/single_core | 4 + tools/unit-test-app/idf_ext.py | 279 +++++++ tools/unit-test-app/main/CMakeLists.txt | 4 + tools/unit-test-app/main/app_main.c | 12 +- .../partition_table_unit_test_app.csv | 17 + .../partition_table_unit_test_two_ota.csv | 11 + tools/unit-test-app/sdkconfig.defaults | 30 + .../unit-test-app/tools/ConfigDependency.yml | 2 + .../unit-test-app/tools/CreateSectionTable.py | 163 ++++ .../unit-test-app/tools/ModuleDefinition.yml | 127 +++ tools/unit-test-app/tools/TagDefinition.yml | 20 + tools/unit-test-app/tools/UnitTestParser.py | 318 ++++++++ tools/unit-test-app/unit_test.py | 748 ++++++++++++++++++ 35 files changed, 2388 insertions(+), 74 deletions(-) create mode 100644 tools/unit-test-app/CMakeLists.txt create mode 100644 tools/unit-test-app/components/unity/CMakeLists.txt create mode 100644 tools/unit-test-app/components/unity/Kconfig create mode 100644 tools/unit-test-app/components/unity/include/idf_performance.h create mode 100644 tools/unit-test-app/components/unity/include/test_utils.h create mode 100644 tools/unit-test-app/components/unity/ref_clock.c create mode 100644 tools/unit-test-app/components/unity/test_utils.c create mode 100644 tools/unit-test-app/configs/aes_no_hw create mode 100644 tools/unit-test-app/configs/app_update create mode 100644 tools/unit-test-app/configs/bt create mode 100644 tools/unit-test-app/configs/default create mode 100644 tools/unit-test-app/configs/libsodium create mode 100644 tools/unit-test-app/configs/psram create mode 100644 tools/unit-test-app/configs/psram_2 create mode 100644 tools/unit-test-app/configs/psram_8m create mode 100644 tools/unit-test-app/configs/release create mode 100644 tools/unit-test-app/configs/single_core create mode 100644 tools/unit-test-app/idf_ext.py create mode 100644 tools/unit-test-app/main/CMakeLists.txt create mode 100644 tools/unit-test-app/partition_table_unit_test_app.csv create mode 100644 tools/unit-test-app/partition_table_unit_test_two_ota.csv create mode 100644 tools/unit-test-app/sdkconfig.defaults create mode 100644 tools/unit-test-app/tools/ConfigDependency.yml create mode 100644 tools/unit-test-app/tools/CreateSectionTable.py create mode 100644 tools/unit-test-app/tools/ModuleDefinition.yml create mode 100644 tools/unit-test-app/tools/TagDefinition.yml create mode 100644 tools/unit-test-app/tools/UnitTestParser.py 
create mode 100755 tools/unit-test-app/unit_test.py diff --git a/tools/unit-test-app/CMakeLists.txt b/tools/unit-test-app/CMakeLists.txt new file mode 100644 index 00000000..95932b0c --- /dev/null +++ b/tools/unit-test-app/CMakeLists.txt @@ -0,0 +1,6 @@ +# The following lines of boilerplate have to be in your project's +# CMakeLists in this exact order for cmake to work correctly +cmake_minimum_required(VERSION 3.5) + +include($ENV{IDF_PATH}/tools/cmake/project.cmake) +project(unit-test-app) \ No newline at end of file diff --git a/tools/unit-test-app/Makefile b/tools/unit-test-app/Makefile index 05f1dc35..006bf975 100644 --- a/tools/unit-test-app/Makefile +++ b/tools/unit-test-app/Makefile @@ -5,9 +5,11 @@ PROJECT_NAME := unit-test-app -NON_INTERACTIVE_TARGET += ut-apply-config-% ut-clean-% +ifeq ($(MAKELEVEL),0) +# Set default target +all: -include $(IDF_PATH)/make/project.mk +# Define helper targets only when not recursing # List of unit-test-app configurations. # Each file in configs/ directory defines a configuration. The format is the @@ -21,8 +23,9 @@ CONFIG_CLEAN_TARGETS := $(addprefix ut-clean-,$(CONFIG_NAMES)) CONFIG_APPLY_TARGETS := $(addprefix ut-apply-config-,$(CONFIG_NAMES)) # Build (intermediate) and output (artifact) directories -BUILDS_DIR := $(PROJECT_PATH)/builds -BINARIES_DIR := $(PROJECT_PATH)/output +PROJECT_DIR := $(abspath $(dir $(firstword $(MAKEFILE_LIST)))) +BUILDS_DIR := $(PROJECT_DIR)/builds +BINARIES_DIR := $(PROJECT_DIR)/output # This generates per-config targets (clean, build, apply-config). define GenerateConfigTargets @@ -56,18 +59,30 @@ $(BINARIES_DIR)/%/$(PROJECT_NAME).bin: configs/% mkdir -p $(BINARIES_DIR)/$*/bootloader mkdir -p $(BUILDS_DIR)/$* # Prepare configuration: top-level sdkconfig.defaults file plus the current configuration (configs/$*) - $(summary) CONFIG $(BUILDS_DIR)/$*/sdkconfig + echo CONFIG $(BUILDS_DIR)/$*/sdkconfig rm -f $(BUILDS_DIR)/$*/sdkconfig cat sdkconfig.defaults > $(BUILDS_DIR)/$*/sdkconfig.defaults echo "" >> $(BUILDS_DIR)/$*/sdkconfig.defaults # in case there is no trailing newline in sdkconfig.defaults cat configs/$* >> $(BUILDS_DIR)/$*/sdkconfig.defaults + # Build, tweaking paths to sdkconfig and sdkconfig.defaults - $(summary) BUILD_CONFIG $(BUILDS_DIR)/$* - $(MAKE) defconfig all \ + echo BUILD_CONFIG $(BUILDS_DIR)/$* + # 'TEST_COMPONENTS=names' option can be added to configs/$* to limit the set + # of tests to build for given configuration. + # Build all tests if this option is not present. 
+ test_components=`sed -n 's/^TEST_COMPONENTS=\(.*\)/\1/p' configs/$*`; \ + test_exclude_components=`sed -n 's/^TEST_EXCLUDE_COMPONENTS=\(.*\)/\1/p' configs/$*`; \ + tests_all=`test -n "$${test_components}"; echo $${?}`; \ + exclude_components=`sed -n 's/^EXCLUDE_COMPONENTS=\(.*\)/\1/p' configs/$*`; \ + $(MAKE) defconfig list-components all \ BUILD_DIR_BASE=$(BUILDS_DIR)/$* \ SDKCONFIG=$(BUILDS_DIR)/$*/sdkconfig \ - SDKCONFIG_DEFAULTS=$(BUILDS_DIR)/$*/sdkconfig.defaults - $(MAKE) print_flash_cmd \ + SDKCONFIG_DEFAULTS=$(BUILDS_DIR)/$*/sdkconfig.defaults \ + TEST_COMPONENTS="$${test_components}" \ + TEST_EXCLUDE_COMPONENTS="$${test_exclude_components}" \ + TESTS_ALL=$${tests_all} \ + EXCLUDE_COMPONENTS="$${exclude_components}" + $(MAKE) --silent print_flash_cmd \ BUILD_DIR_BASE=$(BUILDS_DIR)/$* \ SDKCONFIG=$(BUILDS_DIR)/$*/sdkconfig \ | sed -e 's:'$(BUILDS_DIR)/$*/'::g' \ @@ -77,7 +92,7 @@ $(BINARIES_DIR)/%/$(PROJECT_NAME).bin: configs/% cp $(BUILDS_DIR)/$*/$(PROJECT_NAME).elf $(BINARIES_DIR)/$*/ cp $(BUILDS_DIR)/$*/$(PROJECT_NAME).bin $(BINARIES_DIR)/$*/ cp $(BUILDS_DIR)/$*/$(PROJECT_NAME).map $(BINARIES_DIR)/$*/ - cp $(BUILDS_DIR)/$*/partition_table*.bin $(BINARIES_DIR)/$*/ + cp $(BUILDS_DIR)/$*/*.bin $(BINARIES_DIR)/$*/ cp $(BUILDS_DIR)/$*/sdkconfig $(BINARIES_DIR)/$*/ @@ -87,17 +102,44 @@ ut-help: @echo "make ut-build-NAME - Build unit-test-app with configuration provided in configs/NAME." @echo " Build directory will be builds/NAME/, output binaries will be" @echo " under output/NAME/" - @echo "make ut-clean-NAME - Remove build and output directories for configuration NAME." @echo "" @echo "make ut-build-all-configs - Build all configurations defined in configs/ directory." @echo "" + @echo "Above targets determine list of components to be built from configs/NAME files." + @echo "To build custom subset of components use 'make ut-apply-config-NAME' and then 'make all'." + @echo "" @echo "make ut-apply-config-NAME - Generates configuration based on configs/NAME in sdkconfig" @echo " file. After this, normal all/flash targets can be used." @echo " Useful for development/debugging." @echo "" + @echo "make ut-clean-NAME - Remove build and output directories for configuration NAME." + @echo "" help: ut-help -.PHONY: ut-build-all-configs ut-clean-all-configs \ - $(CONFIG_BUILD_TARGETS) $(CONFIG_CLEAN_TARGETS) $(CONFIG_APPLY_TARGETS) \ +LOCAL_TARGETS := ut-build-all-configs ut-clean-all-configs \ + $(CONFIG_BUILD_TARGETS) $(CONFIG_CLEAN_TARGETS) \ ut-help + +.PHONY: $(LOCAL_TARGETS) + +NON_INTERACTIVE_TARGET += ut-apply-config-% ut-clean-% ut-build-% \ + ut-build-all-configs ut-clean-all-configs + +endif # MAKELEVEL == 0 + + +# When targets defined in this makefile are built, don't need to include the main project makefile. +# This prevents some variables which depend on build directory from being set erroneously. +ifeq ($(filter $(LOCAL_TARGETS),$(MAKECMDGOALS)),) + +include $(IDF_PATH)/make/project.mk + +endif + +# If recursing, print the actual list of tests being built +ifneq ($(MAKELEVEL),0) + +$(info TESTS $(foreach comp,$(TEST_COMPONENT_NAMES),$(patsubst %_test,%,$(comp)))) + +endif # MAKELEVEL != 0 diff --git a/tools/unit-test-app/README.md b/tools/unit-test-app/README.md index d59e71f7..9e197dfd 100644 --- a/tools/unit-test-app/README.md +++ b/tools/unit-test-app/README.md @@ -4,6 +4,8 @@ ESP-IDF unit tests are run using Unit Test App. The app can be built with the un # Building Unit Test App +## GNU Make + * Follow the setup instructions in the top-level esp-idf README. 
* Set IDF_PATH environment variable to point to the path to the esp-idf top-level directory. * Change into `tools/unit-test-app` directory @@ -12,11 +14,21 @@ ESP-IDF unit tests are run using Unit Test App. The app can be built with the un * Follow the printed instructions to flash, or run `make flash`. * Unit test have a few preset sdkconfigs. It provides command `make ut-clean-config_name` and `make ut-build-config_name` (where `config_name` is the file name under `unit-test-app/configs` folder) to build with preset configs. For example, you can use `make ut-build-default TESTS_ALL=1` to build with config file `unit-test-app/configs/default`. Built binary for this config will be copied to `unit-test-app/output/config_name` folder. +## CMake + +* Follow the setup instructions in the top-level esp-idf README. +* Set IDF_PATH environment variable to point to the path to the esp-idf top-level directory. +* Change into `tools/unit-test-app` directory +* `idf.py menuconfig` to configure the Unit Test App. +* `idf.py build -T ...` with `component` set to names of the components to be included in the test app. Or `idf.py build -T all` to build the test app with all the tests for components having `test` subdirectory. +* Follow the printed instructions to flash, or run `idf.py flash -p PORT`. +* Unit test have a few preset sdkconfigs. It provides command `idf.py ut-clean-config_name` and `idf.py ut-build-config_name` (where `config_name` is the file name under `unit-test-app/configs` folder) to build with preset configs. For example, you can use `idf.py ut-build-default -T all` to build with config file `unit-test-app/configs/default`. Built binary for this config will be copied to `unit-test-app/output/config_name` folder. + # Flash Size -The unit test partition table assumes a 4MB flash size. When testing `TESTS_ALL=1`, this additional factory app partition size is required. +The unit test partition table assumes a 4MB flash size. When testing `TESTS_ALL=1` (Make) or `-T all` (CMake), this additional factory app partition size is required. -If building unit tests to run on a smaller flash size, edit `partition_table_unit_tests_app.csv` and use `TEST_COMPONENTS=` instead of `TESTS_ALL` if tests don't fit in a smaller factory app partition (exact size will depend on configured options). +If building unit tests to run on a smaller flash size, edit `partition_table_unit_tests_app.csv` and use `TEST_COMPONENTS=` (Make) or `-T ...` (CMake) instead of `TESTS_ALL` or `-T all` if tests don't fit in a smaller factory app partition (exact size will depend on configured options). # Running Unit Tests @@ -68,7 +80,7 @@ Unit test jobs will do reset before running each case (because some cases do not Gitlab CI do not support create jobs at runtime. We must maunally add all jobs to CI config file. To make test running in parallel, we limit the number of cases running on each job. When add new unit test cases, it could exceed the limitation that current unit test jobs support. In this case, assign test job will raise error, remind you to add jobs to `.gitlab-ci.yml`. ``` -Please add the following jobs to .gitlab-ci.yml with specific tags: +Too many test cases vs jobs to run. Please add the following jobs to .gitlab-ci.yml with specific tags: * Add job with: UT_T1_1, ESP32_IDF, psram * Add job with: UT_T1_1, ESP32_IDF ``` @@ -103,9 +115,31 @@ If you want to reproduce locally, you need to: 2. Check the following print in CI job to get the config name: `Running unit test for config: config_name`. 
Then flash the binary of this config to your board. 3. Run the failed case on your board (refer to Running Unit Tests section). * There're some special UT cases (multiple stages case, multiple devices cases) which requires user interaction: - * You can refer to [unit test document](https://esp-idf.readthedocs.io/en/latest/api-guides/unit-tests.html#running-unit-tests) to run test manually. + * You can refer to [unit test document](https://docs.espressif.com/projects/esp-idf/en/latest/api-guides/unit-tests.html#running-unit-tests) to run test manually. * Or, you can use `tools/unit-test-app/unit_test.py` to run the test cases: * read document of tiny-test-fw, set correct `TEST_FW_PATH` and `IDF_PATH` - * modify `unit_test.py`, pass the test cases need to test as parameter (refer to test function doc string for supported parameter format) to test functions. - * use `python unit_test.py` to run test + * run `unit_test.py` (see examples below) * You can also use `tools/tiny-test-fw/Runner.py` to run test cases (it will be the same as what Runner do). Please use `python Runner.py -c $CONFIG_FILE $IDF_PATH/tools/unit-test-app` command, where `CONFIG_FILE` is a YAML file with same name with CI job in `components/idf_test/unit_test/CIConfigs` (artifacts, need to be download from `assign_test` job). + +## Running unit tests on local machine by `unit_test.py` + +A couple of examples follow for running unit tests on local machine. + +```bash +# run a simple unit test +./unit_test.py "UART can do select()" +# repeat the tests two times +./unit_test.py -r 2 "UART can do select()" +# use custom environment config file +./unit_test.py -e /tmp/EnvConfigTemplate.yml "UART can do select()" +# use custom application binary +./unit_test.py -b /tmp/app.bin "UART can do select()" +# run a list of unit tests +./unit_test.py "UART can do select()" "concurent selects work" +# add some options for unit tests +./unit_test.py "UART can do select()",timeout:10 "concurent selects work",config:release,env_tag:UT_T2_1 +# run a multi stage test (type of test and child case numbers are autodetected) +./unit_test.py "check a time after wakeup from deep sleep" +# run a list of different unit tests (one simple and one multi stage test) +./unit_test.py "concurent selects work" "NOINIT attributes behavior" +``` diff --git a/tools/unit-test-app/components/unity/CMakeLists.txt b/tools/unit-test-app/components/unity/CMakeLists.txt new file mode 100644 index 00000000..98d8fa6d --- /dev/null +++ b/tools/unit-test-app/components/unity/CMakeLists.txt @@ -0,0 +1,10 @@ +set(COMPONENT_SRCDIRS .) 
+set(COMPONENT_ADD_INCLUDEDIRS include) + +set(COMPONENT_REQUIRES spi_flash idf_test) + +register_component() + +if(GCC_NOT_5_2_0) + component_compile_options(-Wno-unused-const-variable) +endif() \ No newline at end of file diff --git a/tools/unit-test-app/components/unity/Kconfig b/tools/unit-test-app/components/unity/Kconfig new file mode 100644 index 00000000..642d76f9 --- /dev/null +++ b/tools/unit-test-app/components/unity/Kconfig @@ -0,0 +1,15 @@ +menu "Unity test framework" + +config UNITY_FREERTOS_PRIORITY + int "Priority of Unity test task" + default 5 + +config UNITY_FREERTOS_CPU + int "CPU to run Unity test task on" + default 0 + +config UNITY_FREERTOS_STACK_SIZE + int "Stack size of Unity test task, in bytes" + default 8192 + +endmenu diff --git a/tools/unit-test-app/components/unity/component.mk b/tools/unit-test-app/components/unity/component.mk index ebd7a7d5..c3c44cc0 100644 --- a/tools/unit-test-app/components/unity/component.mk +++ b/tools/unit-test-app/components/unity/component.mk @@ -1,3 +1,7 @@ # # Component Makefile # + +ifeq ($(GCC_NOT_5_2_0), 1) +unity.o: CFLAGS += -Wno-unused-const-variable +endif \ No newline at end of file diff --git a/tools/unit-test-app/components/unity/include/idf_performance.h b/tools/unit-test-app/components/unity/include/idf_performance.h new file mode 100644 index 00000000..60040303 --- /dev/null +++ b/tools/unit-test-app/components/unity/include/idf_performance.h @@ -0,0 +1,32 @@ + +/* @brief macro to print IDF performance + * @param mode : performance item name. a string pointer. + * @param value_fmt: print format and unit of the value, for example: "%02fms", "%dKB" + * @param value : the performance value. +*/ +#define IDF_LOG_PERFORMANCE(item, value_fmt, value) \ + printf("[Performance][%s]: "value_fmt"\n", item, value) + + +/* declare the performance here */ +#define IDF_PERFORMANCE_MAX_HTTPS_REQUEST_BIN_SIZE 800 +#define IDF_PERFORMANCE_MAX_FREERTOS_SPINLOCK_CYCLES_PER_OP 200 +#define IDF_PERFORMANCE_MAX_FREERTOS_SPINLOCK_CYCLES_PER_OP_PSRAM 300 +#define IDF_PERFORMANCE_MAX_FREERTOS_SPINLOCK_CYCLES_PER_OP_UNICORE 130 +#define IDF_PERFORMANCE_MAX_ESP_TIMER_GET_TIME_PER_CALL 1000 +#define IDF_PERFORMANCE_MAX_SPI_PER_TRANS_NO_POLLING 30 +#define IDF_PERFORMANCE_MAX_SPI_PER_TRANS_NO_POLLING_NO_DMA 27 +#define IDF_PERFORMANCE_MAX_SPI_PER_TRANS_POLLING 15 +#define IDF_PERFORMANCE_MAX_SPI_PER_TRANS_POLLING_NO_DMA 15 +/* Due to code size & linker layout differences interacting with cache, VFS + microbenchmark currently runs slower with PSRAM enabled. */ +#define IDF_PERFORMANCE_MAX_VFS_OPEN_WRITE_CLOSE_TIME 50000 +#define IDF_PERFORMANCE_MAX_VFS_OPEN_WRITE_CLOSE_TIME_PSRAM 40000 +// throughput performance by iperf +#define IDF_PERFORMANCE_MIN_TCP_RX_THROUGHPUT 50 +#define IDF_PERFORMANCE_MIN_TCP_TX_THROUGHPUT 40 +#define IDF_PERFORMANCE_MIN_UDP_RX_THROUGHPUT 80 +#define IDF_PERFORMANCE_MIN_UDP_TX_THROUGHPUT 50 +// events dispatched per second by event loop library +#define IDF_PERFORMANCE_MIN_EVENT_DISPATCH 25000 +#define IDF_PERFORMANCE_MIN_EVENT_DISPATCH_PSRAM 21000 diff --git a/tools/unit-test-app/components/unity/include/test_utils.h b/tools/unit-test-app/components/unity/include/test_utils.h new file mode 100644 index 00000000..68e8e81d --- /dev/null +++ b/tools/unit-test-app/components/unity/include/test_utils.h @@ -0,0 +1,109 @@ +// Copyright 2015-2016 Espressif Systems (Shanghai) PTE LTD +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
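[Editorial aside, not part of the patch] A minimal sketch of how a test might combine the `IDF_LOG_PERFORMANCE` macro with one of the limits declared in `idf_performance.h` above. The `TEST_CASE` wrapper comes from the IDF unity component; interpreting the `ESP_TIMER_GET_TIME_PER_CALL` limit as nanoseconds per call is an assumption made only for this sketch.

```c
/* Hypothetical performance check: log a measured value and compare it against
 * a limit from idf_performance.h. Treating the limit as ns/call is an
 * assumption of this sketch, not something stated by the header above. */
#include "unity.h"
#include "esp_timer.h"

TEST_CASE("esp_timer_get_time call overhead", "[perf]")
{
    const int iterations = 10000;
    int64_t start = esp_timer_get_time();
    for (int i = 0; i < iterations; i++) {
        esp_timer_get_time();
    }
    /* esp_timer_get_time() returns microseconds; convert the total to ns/call */
    int64_t ns_per_call = (esp_timer_get_time() - start) * 1000 / iterations;

    IDF_LOG_PERFORMANCE("esp_timer_get_time", "%d ns", (int) ns_per_call);
    TEST_ASSERT_MESSAGE(ns_per_call < IDF_PERFORMANCE_MAX_ESP_TIMER_GET_TIME_PER_CALL,
                        "esp_timer_get_time overhead exceeded the declared limit");
}
```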
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +#pragma once + +// Utilities for esp-idf unit tests + +#include +#include + +/* Return the 'flash_test' custom data partition (type 0x55) + defined in the custom partition table. +*/ +const esp_partition_t *get_test_data_partition(); + +/** + * @brief Initialize reference clock + * + * Reference clock provides timestamps at constant 1 MHz frequency, even when + * the APB frequency is changing. + */ +void ref_clock_init(); + +/** + * @brief Deinitialize reference clock + */ +void ref_clock_deinit(); + + +/** + * @brief Get reference clock timestamp + * @return number of microseconds since the reference clock was initialized + */ +uint64_t ref_clock_get(); + + +/** + * @brief Reset automatic leak checking which happens in unit tests. + * + * Updates recorded "before" free memory values to the free memory values + * at time of calling. Resets leak checker if tracing is enabled in + * config. + * + * This can be called if a test case does something which allocates + * memory on first use, for example. + * + * @note Use with care as this can mask real memory leak problems. + */ +void unity_reset_leak_checks(void); + + +/** + * @brief Call this function from a test case which requires TCP/IP or + * LWIP functionality. + * + * @note This should be the first function the test case calls, as it will + * allocate memory on first use (and also reset the test case leak checker). + */ +void test_case_uses_tcpip(void); + + +/** + * @brief wait for signals. + * + * for multiple devices test cases, DUT might need to wait for other DUTs before continue testing. + * As all DUTs are independent, need user (or test script) interaction to make test synchronized. + * + * Here we provide signal functions for this. + * For example, we're testing GPIO, DUT1 has one pin connect to with DUT2. + * DUT2 will output high level and then DUT1 will read input. + * DUT1 should call `unity_wait_for_signal("output high level");` before it reads input. + * DUT2 should call `unity_send_signal("output high level");` after it finished setting output high level. + * According to the console logs: + * + * DUT1 console: + * + * ``` + * Waiting for signal: [output high level]! + * Please press "Enter" key to once any board send this signal. + * ``` + * + * DUT2 console: + * + * ``` + * Send signal: [output high level]! + * ``` + * + * Then we press Enter key on DUT1's console, DUT1 starts to read input and then test success. + * + * @param signal_name signal name which DUT expected to wait before proceed testing + */ +void unity_wait_for_signal(const char* signal_name); + +/** + * @brief DUT send signal. + * + * @param signal_name signal name which DUT send once it finished preparing. 
+ */ +void unity_send_signal(const char* signal_name); diff --git a/tools/unit-test-app/components/unity/include/unity.h b/tools/unit-test-app/components/unity/include/unity.h index 5dc0725b..596c806c 100644 --- a/tools/unit-test-app/components/unity/include/unity.h +++ b/tools/unit-test-app/components/unity/include/unity.h @@ -16,6 +16,9 @@ extern "C" #define UNITY_INCLUDE_CONFIG_H #include "unity_internals.h" +/* include performance pass standards header file */ +#include "idf_performance.h" + void setUp(void); void tearDown(void); diff --git a/tools/unit-test-app/components/unity/include/unity_config.h b/tools/unit-test-app/components/unity/include/unity_config.h index 94caca15..19f73b1c 100644 --- a/tools/unit-test-app/components/unity/include/unity_config.h +++ b/tools/unit-test-app/components/unity/include/unity_config.h @@ -1,17 +1,19 @@ #ifndef UNITY_CONFIG_H #define UNITY_CONFIG_H -#include - // This file gets included from unity.h via unity_internals.h // It is inside #ifdef __cplusplus / extern "C" block, so we can // only use C features here // Adapt Unity to our environment, disable FP support +#include +#include + /* Some definitions applicable to Unity running in FreeRTOS */ -#define UNITY_FREERTOS_PRIORITY 5 -#define UNITY_FREERTOS_CPU 0 +#define UNITY_FREERTOS_PRIORITY CONFIG_UNITY_FREERTOS_PRIORITY +#define UNITY_FREERTOS_CPU CONFIG_UNITY_FREERTOS_CPU +#define UNITY_FREERTOS_STACK_SIZE CONFIG_UNITY_FREERTOS_STACK_SIZE #define UNITY_EXCLUDE_FLOAT #define UNITY_EXCLUDE_DOUBLE @@ -49,7 +51,7 @@ #define UNITY_TEST_FN_SET(...) \ static test_func UNITY_TEST_UID(test_functions)[] = {__VA_ARGS__}; \ - static char* UNITY_TEST_UID(test_fn_name)[] = FN_NAME_SET(PP_NARG(__VA_ARGS__), __VA_ARGS__) + static const char* UNITY_TEST_UID(test_fn_name)[] = FN_NAME_SET(PP_NARG(__VA_ARGS__), __VA_ARGS__) typedef void (* test_func)(void); @@ -62,7 +64,7 @@ struct test_desc_t const char* file; int line; uint8_t test_fn_count; - char ** test_fn_name; + const char ** test_fn_name; struct test_desc_t* next; }; diff --git a/tools/unit-test-app/components/unity/ref_clock.c b/tools/unit-test-app/components/unity/ref_clock.c new file mode 100644 index 00000000..52d4f1ca --- /dev/null +++ b/tools/unit-test-app/components/unity/ref_clock.c @@ -0,0 +1,170 @@ +// Copyright 2017 Espressif Systems (Shanghai) PTE LTD +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +/* Unit tests need to have access to reliable timestamps even if CPU and APB + * clock frequencies change over time. This reference clock is built upon two + * peripherals: one RMT channel and one PCNT channel, plus one GPIO to connect + * these peripherals. + * + * RMT channel is configured to use REF_TICK as clock source, which is a 1 MHz + * clock derived from APB_CLK using a set of dividers. The divider is changed + * automatically by hardware depending on the current clock source of APB_CLK. + * For example, if APB_CLK is derived from PLL, one divider is used, and when + * APB_CLK is derived from XTAL, another divider is used. 
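[Editorial aside, not part of the patch] The `unity_wait_for_signal`/`unity_send_signal` pair declared above is easiest to see in a sketch of the DUT1/DUT2 handshake described in the header comment. The GPIO number and the helper-function names are illustrative assumptions.

```c
/* Illustrative two-board handshake using the signal helpers declared above.
 * GPIO 4 is an arbitrary choice; it is assumed to be wired between DUT1 and DUT2. */
#include "unity.h"
#include "test_utils.h"
#include "driver/gpio.h"

static void dut2_drive_high(void)        /* runs on DUT2 */
{
    gpio_set_direction(GPIO_NUM_4, GPIO_MODE_OUTPUT);
    gpio_set_level(GPIO_NUM_4, 1);
    unity_send_signal("output high level");      /* tell the operator DUT2 is ready */
}

static void dut1_check_input(void)       /* runs on DUT1 */
{
    gpio_set_direction(GPIO_NUM_4, GPIO_MODE_INPUT);
    unity_wait_for_signal("output high level");  /* blocks until Enter is pressed */
    TEST_ASSERT_EQUAL(1, gpio_get_level(GPIO_NUM_4));
}
```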
RMT channel clocked + * by REF_TICK is configured to generate a continuous 0.5 MHz signal, which is + * connected to a GPIO. PCNT takes the input signal from this GPIO and counts + * the edges (which occur at 1MHz frequency). PCNT counter is only 16 bit wide, + * so an interrupt is configured to trigger when the counter reaches 30000, + * incrementing a 32-bit millisecond counter maintained by software. + * Together these two counters may be used at any time to obtain the timestamp. + */ + +#include "test_utils.h" +#include "soc/rmt_struct.h" +#include "soc/pcnt_struct.h" +#include "soc/pcnt_reg.h" +#include "soc/gpio_sig_map.h" +#include "soc/dport_reg.h" +#include "rom/gpio.h" +#include "rom/ets_sys.h" +#include "driver/gpio.h" +#include "esp_intr_alloc.h" +#include "freertos/FreeRTOS.h" +#include "driver/periph_ctrl.h" + +/* Select which RMT and PCNT channels, and GPIO to use */ +#define REF_CLOCK_RMT_CHANNEL 7 +#define REF_CLOCK_PCNT_UNIT 0 +#define REF_CLOCK_GPIO 21 + +#define REF_CLOCK_PRESCALER_MS 30 + +static void IRAM_ATTR pcnt_isr(void* arg); + +static intr_handle_t s_intr_handle; +static portMUX_TYPE s_lock = portMUX_INITIALIZER_UNLOCKED; +static volatile uint32_t s_milliseconds; + +void ref_clock_init() +{ + assert(s_intr_handle == NULL && "already initialized"); + + // Route RMT output to GPIO matrix + gpio_matrix_out(REF_CLOCK_GPIO, RMT_SIG_OUT0_IDX + REF_CLOCK_RMT_CHANNEL, false, false); + + + // Initialize RMT + periph_module_enable(PERIPH_RMT_MODULE); + RMT.apb_conf.fifo_mask = 1; + rmt_item32_t data = { + .duration0 = 1, + .level0 = 1, + .duration1 = 0, + .level1 = 0 + }; + RMTMEM.chan[REF_CLOCK_RMT_CHANNEL].data32[0] = data; + RMTMEM.chan[REF_CLOCK_RMT_CHANNEL].data32[1].val = 0; + + + RMT.conf_ch[REF_CLOCK_RMT_CHANNEL].conf0.clk_en = 1; + RMT.conf_ch[REF_CLOCK_RMT_CHANNEL].conf1.tx_start = 0; + RMT.conf_ch[REF_CLOCK_RMT_CHANNEL].conf1.mem_owner = 0; + RMT.conf_ch[REF_CLOCK_RMT_CHANNEL].conf1.mem_rd_rst = 1; + RMT.conf_ch[REF_CLOCK_RMT_CHANNEL].conf1.apb_mem_rst = 1; + RMT.conf_ch[REF_CLOCK_RMT_CHANNEL].conf0.carrier_en = 0; + RMT.conf_ch[REF_CLOCK_RMT_CHANNEL].conf0.div_cnt = 1; + RMT.conf_ch[REF_CLOCK_RMT_CHANNEL].conf0.mem_size = 1; + RMT.conf_ch[REF_CLOCK_RMT_CHANNEL].conf1.ref_always_on = 0; + RMT.conf_ch[REF_CLOCK_RMT_CHANNEL].conf1.tx_conti_mode = 1; + RMT.conf_ch[REF_CLOCK_RMT_CHANNEL].conf1.tx_start = 1; + + // Route signal to PCNT + int pcnt_sig_idx = (REF_CLOCK_PCNT_UNIT < 5) ? 
+ PCNT_SIG_CH0_IN0_IDX + 4 * REF_CLOCK_PCNT_UNIT : + PCNT_SIG_CH0_IN5_IDX + 4 * (REF_CLOCK_PCNT_UNIT - 5); + gpio_matrix_in(REF_CLOCK_GPIO, pcnt_sig_idx, false); + if (REF_CLOCK_GPIO != 20) { + PIN_INPUT_ENABLE(GPIO_PIN_MUX_REG[REF_CLOCK_GPIO]); + } else { + PIN_INPUT_ENABLE(PERIPHS_IO_MUX_GPIO20_U); + } + + // Initialize PCNT + periph_module_enable(PERIPH_PCNT_MODULE); + + PCNT.conf_unit[REF_CLOCK_PCNT_UNIT].conf0.ch0_hctrl_mode = 0; + PCNT.conf_unit[REF_CLOCK_PCNT_UNIT].conf0.ch0_lctrl_mode = 0; + PCNT.conf_unit[REF_CLOCK_PCNT_UNIT].conf0.ch0_pos_mode = 1; + PCNT.conf_unit[REF_CLOCK_PCNT_UNIT].conf0.ch0_neg_mode = 1; + PCNT.conf_unit[REF_CLOCK_PCNT_UNIT].conf0.thr_l_lim_en = 0; + PCNT.conf_unit[REF_CLOCK_PCNT_UNIT].conf0.thr_h_lim_en = 1; + PCNT.conf_unit[REF_CLOCK_PCNT_UNIT].conf0.thr_zero_en = 0; + PCNT.conf_unit[REF_CLOCK_PCNT_UNIT].conf0.thr_thres0_en = 0; + PCNT.conf_unit[REF_CLOCK_PCNT_UNIT].conf0.thr_thres1_en = 0; + PCNT.conf_unit[REF_CLOCK_PCNT_UNIT].conf2.cnt_h_lim = REF_CLOCK_PRESCALER_MS * 1000; + + // Enable PCNT and wait for it to start counting + PCNT.ctrl.val &= ~(BIT(REF_CLOCK_PCNT_UNIT * 2 + 1)); + PCNT.ctrl.val |= BIT(REF_CLOCK_PCNT_UNIT * 2); + PCNT.ctrl.val &= ~BIT(REF_CLOCK_PCNT_UNIT * 2); + + ets_delay_us(10000); + + // Enable interrupt + s_milliseconds = 0; + ESP_ERROR_CHECK(esp_intr_alloc(ETS_PCNT_INTR_SOURCE, ESP_INTR_FLAG_IRAM, pcnt_isr, NULL, &s_intr_handle)); + PCNT.int_clr.val = BIT(REF_CLOCK_PCNT_UNIT); + PCNT.int_ena.val = BIT(REF_CLOCK_PCNT_UNIT); +} + +static void IRAM_ATTR pcnt_isr(void* arg) +{ + portENTER_CRITICAL(&s_lock); + PCNT.int_clr.val = BIT(REF_CLOCK_PCNT_UNIT); + s_milliseconds += REF_CLOCK_PRESCALER_MS; + portEXIT_CRITICAL(&s_lock); +} + +void ref_clock_deinit() +{ + assert(s_intr_handle && "deinit called without init"); + + // Disable interrupt + PCNT.int_ena.val &= ~BIT(REF_CLOCK_PCNT_UNIT); + esp_intr_free(s_intr_handle); + s_intr_handle = NULL; + + // Disable RMT + RMT.conf_ch[REF_CLOCK_RMT_CHANNEL].conf1.tx_start = 0; + RMT.conf_ch[REF_CLOCK_RMT_CHANNEL].conf0.clk_en = 0; + periph_module_disable(PERIPH_RMT_MODULE); + + // Disable PCNT + PCNT.ctrl.val |= ~(BIT(REF_CLOCK_PCNT_UNIT * 2 + 1)); + periph_module_disable(PERIPH_PCNT_MODULE); +} + +uint64_t ref_clock_get() +{ + portENTER_CRITICAL(&s_lock); + uint32_t microseconds = PCNT.cnt_unit[REF_CLOCK_PCNT_UNIT].cnt_val; + uint32_t milliseconds = s_milliseconds; + if (PCNT.int_st.val & BIT(REF_CLOCK_PCNT_UNIT)) { + // refresh counter value, in case the overflow has happened after reading cnt_val + microseconds = PCNT.cnt_unit[REF_CLOCK_PCNT_UNIT].cnt_val; + milliseconds += REF_CLOCK_PRESCALER_MS; + } + portEXIT_CRITICAL(&s_lock); + return 1000 * (uint64_t) milliseconds + (uint64_t) microseconds; +} diff --git a/tools/unit-test-app/components/unity/test_utils.c b/tools/unit-test-app/components/unity/test_utils.c new file mode 100644 index 00000000..36aae4c2 --- /dev/null +++ b/tools/unit-test-app/components/unity/test_utils.c @@ -0,0 +1,86 @@ +// Copyright 2015-2016 Espressif Systems (Shanghai) PTE LTD +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
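[Editorial aside, not part of the patch] A short sketch of how the reference clock implemented above is meant to be consumed by a test: initialize, take two timestamps around the code under measurement, then deinitialize. The helper name is illustrative.

```c
/* Sketch: timing a section of code with the reference clock, so the result in
 * microseconds stays valid even if the APB/CPU frequency changes mid-test. */
#include <stdint.h>
#include <stdio.h>
#include "test_utils.h"

static void time_code_under_test(void (*code_under_test)(void))
{
    ref_clock_init();
    uint64_t t0 = ref_clock_get();
    code_under_test();
    uint64_t t1 = ref_clock_get();
    ref_clock_deinit();
    printf("took %llu us\n", (unsigned long long) (t1 - t0));
}
```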
+// See the License for the specific language governing permissions and +// limitations under the License. + +#include +#include "unity.h" +#include "test_utils.h" +#include "rom/ets_sys.h" +#include "rom/uart.h" +#include "freertos/FreeRTOS.h" +#include "freertos/task.h" +#include "tcpip_adapter.h" +#include "lwip/sockets.h" + +const esp_partition_t *get_test_data_partition() +{ + /* This finds "flash_test" partition defined in partition_table_unit_test_app.csv */ + const esp_partition_t *result = esp_partition_find_first(ESP_PARTITION_TYPE_DATA, + ESP_PARTITION_SUBTYPE_ANY, "flash_test"); + TEST_ASSERT_NOT_NULL(result); /* means partition table set wrong */ + return result; +} + +// wait user to send "Enter" key +static void wait_user_control() +{ + char sign[5] = {0}; + while(strlen(sign) == 0) + { + /* Flush anything already in the RX buffer */ + while(uart_rx_one_char((uint8_t *) sign) == OK) { + } + /* Read line */ + UartRxString((uint8_t*) sign, sizeof(sign) - 1); + } +} + +void test_case_uses_tcpip() +{ + // Can be called more than once, does nothing on subsequent calls + tcpip_adapter_init(); + + // Allocate all sockets then free them + // (First time each socket is allocated some one-time allocations happen.) + int sockets[CONFIG_LWIP_MAX_SOCKETS]; + for (int i = 0; i < CONFIG_LWIP_MAX_SOCKETS; i++) { + int type = (i % 2 == 0) ? SOCK_DGRAM : SOCK_STREAM; + int family = (i % 3 == 0) ? PF_INET6 : PF_INET; + sockets[i] = socket(family, type, IPPROTO_IP); + } + for (int i = 0; i < CONFIG_LWIP_MAX_SOCKETS; i++) { + close(sockets[i]); + } + + // Allow LWIP tasks to finish initialising themselves + vTaskDelay(25 / portTICK_RATE_MS); + + printf("Note: tcpip_adapter_init() has been called. Until next reset, TCP/IP task will periodicially allocate memory and consume CPU time.\n"); + + // Reset the leak checker as LWIP allocates a lot of memory on first run + unity_reset_leak_checks(); +} + +// signal functions, used for sync between unity DUTs for multiple devices cases +void unity_wait_for_signal(const char* signal_name) +{ + printf("Waiting for signal: [%s]!\n" + "Please press \"Enter\" key to once any board send this signal.\n", signal_name); + wait_user_control(); +} + +void unity_send_signal(const char* signal_name) +{ + printf("Send signal: [%s]!\n", signal_name); +} + diff --git a/tools/unit-test-app/components/unity/unity_platform.c b/tools/unit-test-app/components/unity/unity_platform.c index 49cae755..02fcd887 100644 --- a/tools/unit-test-app/components/unity/unity_platform.c +++ b/tools/unit-test-app/components/unity/unity_platform.c @@ -2,16 +2,20 @@ #include #include #include - #include "unity.h" - +#include "rom/ets_sys.h" +#include "rom/uart.h" #include "freertos/FreeRTOS.h" #include "freertos/task.h" - -#include "driver/uart.h" - #include "esp_log.h" -#include "esp_system.h" +#include "esp_clk.h" +#include "soc/cpu.h" +#include "esp_heap_caps.h" +#include "test_utils.h" + +#ifdef CONFIG_HEAP_TRACING +#include "esp_heap_trace.h" +#endif // Pointers to the head and tail of linked list of test description structs: static struct test_desc_t* s_unity_tests_first = NULL; @@ -20,6 +24,10 @@ static struct test_desc_t* s_unity_tests_last = NULL; // Inverse of the filter static bool s_invert = false; + +static size_t before_free_8bit; +static size_t before_free_32bit; + /* Each unit test is allowed to "leak" this many bytes. TODO: Make this value editable by the test. 
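[Editorial aside, not part of the patch] A sketch of the intended calling pattern for `test_case_uses_tcpip()` defined above: call it first, so LWIP's one-time allocations are made and the leak baseline is reset before the test body runs. The test name and tag are illustrative.

```c
/* Hypothetical socket test: test_case_uses_tcpip() is called first so that
 * LWIP's first-use allocations are not reported as a leak in tearDown(). */
#include "unity.h"
#include "test_utils.h"
#include "lwip/sockets.h"

TEST_CASE("socket can be created and closed", "[lwip]")
{
    test_case_uses_tcpip();                       /* init TCP/IP, reset leak baseline */
    int s = socket(PF_INET, SOCK_DGRAM, IPPROTO_IP);
    TEST_ASSERT(s >= 0);
    close(s);
}
```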
@@ -29,12 +37,52 @@ static bool s_invert = false; const size_t WARN_LEAK_THRESHOLD = 256; const size_t CRITICAL_LEAK_THRESHOLD = 4096; -extern int uart_rx_one_char(char *c); +void unity_reset_leak_checks(void) +{ + before_free_8bit = heap_caps_get_free_size(MALLOC_CAP_8BIT); + before_free_32bit = heap_caps_get_free_size(MALLOC_CAP_32BIT); + +#ifdef CONFIG_HEAP_TRACING + heap_trace_start(HEAP_TRACE_LEAKS); +#endif +} /* setUp runs before every test */ void setUp(void) { +// If heap tracing is enabled in kconfig, leak trace the test +#ifdef CONFIG_HEAP_TRACING + const size_t num_heap_records = 80; + static heap_trace_record_t *record_buffer; + if (!record_buffer) { + record_buffer = malloc(sizeof(heap_trace_record_t) * num_heap_records); + assert(record_buffer); + heap_trace_init_standalone(record_buffer, num_heap_records); + } +#endif + printf("%s", ""); /* sneakily lazy-allocate the reent structure for this test task */ + get_test_data_partition(); /* allocate persistent partition table structures */ + + unity_reset_leak_checks(); +} + +static void check_leak(size_t before_free, size_t after_free, const char *type) +{ + if (before_free <= after_free) { + return; + } + size_t leaked = before_free - after_free; + if (leaked < WARN_LEAK_THRESHOLD) { + return; + } + + printf("MALLOC_CAP_%s %s leak: Before %u bytes free, After %u bytes free (delta %u)\n", + type, + leaked < CRITICAL_LEAK_THRESHOLD ? "potential" : "critical", + before_free, after_free, leaked); + fflush(stdout); + TEST_ASSERT_MESSAGE(leaked < CRITICAL_LEAK_THRESHOLD, "The test leaked too much memory"); } /* tearDown runs after every test */ @@ -47,6 +95,20 @@ void tearDown(void) const char *real_testfile = Unity.TestFile; Unity.TestFile = __FILE__; + /* check if unit test has caused heap corruption in any heap */ + TEST_ASSERT_MESSAGE( heap_caps_check_integrity(MALLOC_CAP_INVALID, true), "The test has corrupted the heap"); + + /* check for leaks */ +#ifdef CONFIG_HEAP_TRACING + heap_trace_stop(); + heap_trace_dump(); +#endif + size_t after_free_8bit = heap_caps_get_free_size(MALLOC_CAP_8BIT); + size_t after_free_32bit = heap_caps_get_free_size(MALLOC_CAP_32BIT); + + check_leak(before_free_8bit, after_free_8bit, "8BIT"); + check_leak(before_free_32bit, after_free_32bit, "32BIT"); + Unity.TestFile = real_testfile; // go back to the real filename } @@ -54,43 +116,21 @@ void unity_putc(int c) { if (c == '\n') { - putchar('\r'); - putchar('\n'); + uart_tx_one_char('\r'); + uart_tx_one_char('\n'); } else if (c == '\r') { } else { - putchar(c); + uart_tx_one_char(c); } } void unity_flush() { -// uart_tx_wait_idle(0); // assume that output goes to UART0 -} - -static int UART_RxString(char *s, size_t len) -{ - size_t i = 1; - char *s_local = s; - - while (i < len) { - while (uart_rx_one_char(s_local) != 0); - - if ((*s_local == '\n') || (*s_local == '\r')) { - break; - } - - s_local++; - i++; - } - - s_local++; - *s_local = '\0'; - - return 0; + uart_tx_wait_idle(0); // assume that output goes to UART0 } void unity_testcase_register(struct test_desc_t* desc) @@ -132,8 +172,10 @@ void multiple_function_option(const struct test_desc_t* test_ms) while(strlen(cmdline) == 0) { /* Flush anything already in the RX buffer */ - while(uart_rx_one_char(cmdline) == 0); - UART_RxString(cmdline, sizeof(cmdline) - 1); + while(uart_rx_one_char((uint8_t *) cmdline) == OK) { + + } + UartRxString((uint8_t*) cmdline, sizeof(cmdline) - 1); if(strlen(cmdline) == 0) { /* if input was newline, print a new menu */ print_multiple_function_test_menu(test_ms); @@ 
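[Editorial aside, not part of the patch] To illustrate the leak-check flow added above (setUp records free heap sizes, tearDown compares them via check_leak): a test that lazily allocates a persistent buffer can call `unity_reset_leak_checks()` so the one-time allocation is excluded from the accounting. The variable and test names below are illustrative.

```c
/* Sketch: excluding a deliberate one-time allocation from the leak check.
 * setUp() recorded the baseline; resetting it here keeps tearDown() quiet. */
#include <stdlib.h>
#include "unity.h"
#include "test_utils.h"

static char *s_lazy_buffer;   /* intentionally kept for the app's lifetime */

TEST_CASE("first use allocates a persistent buffer", "[misc]")
{
    if (s_lazy_buffer == NULL) {
        s_lazy_buffer = malloc(1024);
        TEST_ASSERT_NOT_NULL(s_lazy_buffer);
        unity_reset_leak_checks();    /* documented escape hatch in test_utils.h */
    }
}
```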
-152,6 +194,7 @@ static void unity_run_single_test(const struct test_desc_t* test) printf("Running %s...\n", test->name); // Unit test runner expects to see test name before the test starts fflush(stdout); + uart_tx_wait_idle(CONFIG_CONSOLE_UART_NUM); Unity.TestFile = test->file; Unity.CurrentDetail1 = test->desc; @@ -185,14 +228,12 @@ static void unity_run_single_test_by_index_parse(const char* filter, int index_m int test_index = strtol(filter, NULL, 10); if (test_index >= 1 && test_index <= index_max) { - extern uint32_t system_get_cpu_freq(void); - uint32_t start; - asm volatile ("rsr %0, CCOUNT" : "=r" (start)); + RSR(CCOUNT, start); unity_run_single_test_by_index(test_index - 1); uint32_t end; - asm volatile ("rsr %0, CCOUNT" : "=r" (end)); - uint32_t ms = (end - start) / (system_get_cpu_freq() * 1000000 / 1000); + RSR(CCOUNT, end); + uint32_t ms = (end - start) / (esp_clk_cpu_freq() / 1000); printf("Test ran in %dms\n", ms); } } @@ -273,6 +314,7 @@ static int print_test_menu(void) } } } + printf("\nEnter test for running.\n"); /* unit_test.py needs it for finding the end of test menu */ return test_counter; } @@ -298,9 +340,10 @@ void unity_run_menu() while(strlen(cmdline) == 0) { /* Flush anything already in the RX buffer */ - while(uart_rx_one_char(cmdline) == 0); + while(uart_rx_one_char((uint8_t *) cmdline) == OK) { + } /* Read input */ - UART_RxString(cmdline, sizeof(cmdline) - 1); + UartRxString((uint8_t*) cmdline, sizeof(cmdline) - 1); trim_trailing_space(cmdline); if(strlen(cmdline) == 0) { /* if input was newline, print a new menu */ diff --git a/tools/unit-test-app/configs/aes_no_hw b/tools/unit-test-app/configs/aes_no_hw new file mode 100644 index 00000000..f890e3f8 --- /dev/null +++ b/tools/unit-test-app/configs/aes_no_hw @@ -0,0 +1,3 @@ +TEST_EXCLUDE_COMPONENTS=libsodium bt app_update +TEST_COMPONENTS=mbedtls +CONFIG_MBEDTLS_HARDWARE_AES=n diff --git a/tools/unit-test-app/configs/app_update b/tools/unit-test-app/configs/app_update new file mode 100644 index 00000000..4069be24 --- /dev/null +++ b/tools/unit-test-app/configs/app_update @@ -0,0 +1,13 @@ +TEST_COMPONENTS=app_update +TEST_EXCLUDE_COMPONENTS=libsodium bt +CONFIG_UNITY_FREERTOS_STACK_SIZE=12288 +CONFIG_PARTITION_TABLE_CUSTOM=y +CONFIG_PARTITION_TABLE_CUSTOM_FILENAME="partition_table_unit_test_two_ota.csv" +CONFIG_PARTITION_TABLE_FILENAME="partition_table_unit_test_two_ota.csv" +CONFIG_PARTITION_TABLE_OFFSET=0x18000 +CONFIG_BOOTLOADER_FACTORY_RESET=y +CONFIG_BOOTLOADER_APP_TEST=y +CONFIG_BOOTLOADER_HOLD_TIME_GPIO=2 +CONFIG_BOOTLOADER_OTA_DATA_ERASE=y +CONFIG_BOOTLOADER_NUM_PIN_FACTORY_RESET=4 +CONFIG_BOOTLOADER_NUM_PIN_APP_TEST=32 \ No newline at end of file diff --git a/tools/unit-test-app/configs/bt b/tools/unit-test-app/configs/bt new file mode 100644 index 00000000..9bb48647 --- /dev/null +++ b/tools/unit-test-app/configs/bt @@ -0,0 +1,4 @@ +TEST_COMPONENTS=bt +TEST_EXCLUDE_COMPONENTS=app_update +CONFIG_BT_ENABLED=y +CONFIG_UNITY_FREERTOS_STACK_SIZE=12288 \ No newline at end of file diff --git a/tools/unit-test-app/configs/default b/tools/unit-test-app/configs/default new file mode 100644 index 00000000..f7f508a4 --- /dev/null +++ b/tools/unit-test-app/configs/default @@ -0,0 +1 @@ +TEST_EXCLUDE_COMPONENTS=libsodium bt app_update \ No newline at end of file diff --git a/tools/unit-test-app/configs/libsodium b/tools/unit-test-app/configs/libsodium new file mode 100644 index 00000000..af090a53 --- /dev/null +++ b/tools/unit-test-app/configs/libsodium @@ -0,0 +1,3 @@ +TEST_COMPONENTS=libsodium 
+TEST_EXCLUDE_COMPONENTS=bt app_update +CONFIG_UNITY_FREERTOS_STACK_SIZE=12288 \ No newline at end of file diff --git a/tools/unit-test-app/configs/psram b/tools/unit-test-app/configs/psram new file mode 100644 index 00000000..541bff57 --- /dev/null +++ b/tools/unit-test-app/configs/psram @@ -0,0 +1,3 @@ +TEST_EXCLUDE_COMPONENTS=libsodium bt app_update driver esp32 spi_flash +CONFIG_SPIRAM_SUPPORT=y +CONFIG_SPIRAM_BANKSWITCH_ENABLE=n diff --git a/tools/unit-test-app/configs/psram_2 b/tools/unit-test-app/configs/psram_2 new file mode 100644 index 00000000..4173606d --- /dev/null +++ b/tools/unit-test-app/configs/psram_2 @@ -0,0 +1,3 @@ +TEST_COMPONENTS=driver esp32 spi_flash +CONFIG_SPIRAM_SUPPORT=y +CONFIG_SPIRAM_BANKSWITCH_ENABLE=n diff --git a/tools/unit-test-app/configs/psram_8m b/tools/unit-test-app/configs/psram_8m new file mode 100644 index 00000000..d28e5870 --- /dev/null +++ b/tools/unit-test-app/configs/psram_8m @@ -0,0 +1,4 @@ +TEST_COMPONENTS=esp32 +CONFIG_SPIRAM_SUPPORT=y +CONFIG_SPIRAM_BANKSWITCH_ENABLE=y +CONFIG_SPIRAM_BANKSWITCH_RESERVE=8 diff --git a/tools/unit-test-app/configs/release b/tools/unit-test-app/configs/release new file mode 100644 index 00000000..86fbc9a4 --- /dev/null +++ b/tools/unit-test-app/configs/release @@ -0,0 +1,3 @@ +TEST_EXCLUDE_COMPONENTS=bt app_update +CONFIG_OPTIMIZATION_LEVEL_RELEASE=y +CONFIG_OPTIMIZATION_ASSERTIONS_SILENT=y \ No newline at end of file diff --git a/tools/unit-test-app/configs/single_core b/tools/unit-test-app/configs/single_core new file mode 100644 index 00000000..9c85abae --- /dev/null +++ b/tools/unit-test-app/configs/single_core @@ -0,0 +1,4 @@ +TEST_EXCLUDE_COMPONENTS=libsodium bt app_update +CONFIG_MEMMAP_SMP=n +CONFIG_FREERTOS_UNICORE=y +CONFIG_ESP32_RTCDATA_IN_FAST_MEM=y diff --git a/tools/unit-test-app/idf_ext.py b/tools/unit-test-app/idf_ext.py new file mode 100644 index 00000000..9bc9ae00 --- /dev/null +++ b/tools/unit-test-app/idf_ext.py @@ -0,0 +1,279 @@ +import sys +import glob +import tempfile +import os +import os.path +import re +import shutil +import argparse +import json +import copy + +PROJECT_NAME = "unit-test-app" +PROJECT_PATH = os.getcwd() + +# List of unit-test-app configurations. +# Each file in configs/ directory defines a configuration. The format is the +# same as sdkconfig file. 
Configuration is applied on top of sdkconfig.defaults +# file from the project directory +CONFIG_NAMES = os.listdir(os.path.join(PROJECT_PATH, "configs")) + +# Build (intermediate) and output (artifact) directories +BUILDS_DIR = os.path.join(PROJECT_PATH, "builds") +BINARIES_DIR = os.path.join(PROJECT_PATH, "output") + +# Convert the values passed to the -T parameter to corresponding cache entry definitions +# TESTS_ALL and TEST_COMPONENTS +class TestComponentAction(argparse.Action): + def __call__(self, parser, namespace, values, option_string=None): + # Create a new of cache definition entry, adding previous elements + cache_entries = list() + + existing_entries = getattr(namespace, "define_cache_entry", []) + + if existing_entries: + cache_entries.extend(existing_entries) + + # Form -D arguments + if "all" in values: + cache_entries.append("TESTS_ALL=1") + cache_entries.append("TEST_COMPONENTS=''") + else: + cache_entries.append("TESTS_ALL=0") + cache_entries.append("TEST_COMPONENTS='%s'" % " ".join(values)) + + setattr(namespace, "define_cache_entry", cache_entries) + + # Brute force add reconfigure at the very beginning + existing_actions = getattr(namespace, "actions", []) + if not "reconfigure" in existing_actions: + existing_actions = ["reconfigure"] + existing_actions + setattr(namespace, "actions", existing_actions) + +class TestExcludeComponentAction(argparse.Action): + def __call__(self, parser, namespace, values, option_string=None): + # Create a new of cache definition entry, adding previous elements + cache_entries = list() + + existing_entries = getattr(namespace, "define_cache_entry", []) + + if existing_entries: + cache_entries.extend(existing_entries) + + cache_entries.append("TEST_EXCLUDE_COMPONENTS='%s'" % " ".join(values)) + + setattr(namespace, "define_cache_entry", cache_entries) + + # Brute force add reconfigure at the very beginning + existing_actions = getattr(namespace, "actions", []) + if not "reconfigure" in existing_actions: + existing_actions = ["reconfigure"] + existing_actions + setattr(namespace, "actions", existing_actions) + +def add_argument_extensions(parser): + # For convenience, define a -T argument that gets converted to -D arguments + parser.add_argument('-T', '--test-component', help="Specify the components to test", nargs='+', action=TestComponentAction) + # For convenience, define a -T argument that gets converted to -D arguments + parser.add_argument('-E', '--test-exclude-components', help="Specify the components to exclude from testing", nargs='+', action=TestExcludeComponentAction) + +def add_action_extensions(base_functions, base_actions): + + def ut_apply_config(ut_apply_config_name, args): + config_name = re.match(r"ut-apply-config-(.*)", ut_apply_config_name).group(1) + + def set_config_build_variables(prop, defval = None): + property_value = re.match(r"^%s=(.*)" % prop, config_file_content) + if (property_value): + property_value = property_value.group(1) + else: + property_value = defval + + if (property_value): + try: + args.define_cache_entry.append("%s=" % prop + property_value) + except AttributeError: + args.define_cache_entry = ["%s=" % prop + property_value] + + return property_value + + sdkconfig_set = None + + if args.define_cache_entry: + sdkconfig_set = filter(lambda s: "SDKCONFIG=" in s, args.define_cache_entry) + + sdkconfig_path = os.path.join(args.project_dir, "sdkconfig") + + if sdkconfig_set: + sdkconfig_path = sdkconfig_set[-1].split("=")[1] + sdkconfig_path = os.path.abspath(sdkconfig_path) + + try: + 
os.remove(sdkconfig_path) + except OSError: + pass + + if config_name in CONFIG_NAMES: + # Parse the sdkconfig for components to be included/excluded and tests to be run + config = os.path.join(PROJECT_PATH, "configs", config_name) + + with open(config, "r") as config_file: + config_file_content = config_file.read() + + set_config_build_variables("EXCLUDE_COMPONENTS", "''") + + test_components = set_config_build_variables("TEST_COMPONENTS", "''") + + tests_all = None + if test_components == "''": + tests_all = "TESTS_ALL=1" + else: + tests_all = "TESTS_ALL=0" + + try: + args.define_cache_entry.append(tests_all) + except AttributeError: + args.define_cache_entry = [tests_all] + + set_config_build_variables("TEST_EXCLUDE_COMPONENTS","''") + + with tempfile.NamedTemporaryFile() as sdkconfig_temp: + # Use values from the combined defaults and the values from + # config folder to build config + sdkconfig_default = os.path.join(PROJECT_PATH, "sdkconfig.defaults") + + with open(sdkconfig_default, "rb") as sdkconfig_default_file: + sdkconfig_temp.write(sdkconfig_default_file.read()) + + sdkconfig_config = os.path.join(PROJECT_PATH, "configs", config_name) + with open(sdkconfig_config, "rb") as sdkconfig_config_file: + sdkconfig_temp.write(b"\n") + sdkconfig_temp.write(sdkconfig_config_file.read()) + + sdkconfig_temp.flush() + + try: + args.define_cache_entry.append("SDKCONFIG_DEFAULTS=" + sdkconfig_temp.name) + except AttributeError: + args.define_cache_entry = ["SDKCONFIG_DEFAULTS=" + sdkconfig_temp.name] + + reconfigure = base_functions["reconfigure"] + reconfigure(None, args) + else: + if not config_name == "all-configs": + print("unknown unit test app config for action '%s'" % ut_apply_config_name) + + # This target builds the configuration. It does not currently track dependencies, + # but is good enough for CI builds if used together with clean-all-configs. + # For local builds, use 'apply-config-NAME' target and then use normal 'all' + # and 'flash' targets. 
+ def ut_build(ut_build_name, args): + # Create a copy of the passed arguments to prevent arg modifications to accrue if + # all configs are being built + build_args = copy.copy(args) + + config_name = re.match(r"ut-build-(.*)", ut_build_name).group(1) + + if config_name in CONFIG_NAMES: + build_args.build_dir = os.path.join(BUILDS_DIR, config_name) + + src = os.path.join(BUILDS_DIR, config_name) + dest = os.path.join(BINARIES_DIR, config_name) + + try: + os.makedirs(dest) + except OSError: + pass + + # Build, tweaking paths to sdkconfig and sdkconfig.defaults + ut_apply_config("ut-apply-config-" + config_name, build_args) + + build_target = base_functions["build_target"] + + build_target("all", build_args) + + # Copy artifacts to the output directory + shutil.copyfile(os.path.join(build_args.project_dir, "sdkconfig"), os.path.join(dest, "sdkconfig")) + + binaries = [PROJECT_NAME + x for x in [".elf", ".bin", ".map"]] + + for binary in binaries: + shutil.copyfile(os.path.join(src, binary), os.path.join(dest, binary)) + + try: + os.mkdir(os.path.join(dest, "bootloader")) + except OSError: + pass + + shutil.copyfile(os.path.join(src, "bootloader", "bootloader.bin"), os.path.join(dest, "bootloader", "bootloader.bin")) + + for partition_table in glob.glob(os.path.join(src, "partition_table", "partition-table*.bin")): + try: + os.mkdir(os.path.join(dest, "partition_table")) + except OSError: + pass + shutil.copyfile(partition_table, os.path.join(dest, "partition_table", os.path.basename(partition_table))) + + shutil.copyfile(os.path.join(src, "flash_project_args"), os.path.join(dest, "flash_project_args")) + + binaries = glob.glob(os.path.join(src, "*.bin")) + binaries = [os.path.basename(s) for s in binaries] + + for binary in binaries: + shutil.copyfile(os.path.join(src, binary), os.path.join(dest, binary)) + + else: + if not config_name == "all-configs": + print("unknown unit test app config for action '%s'" % ut_build_name) + + def ut_clean(ut_clean_name, args): + config_name = re.match(r"ut-clean-(.*)", ut_clean_name).group(1) + if config_name in CONFIG_NAMES: + shutil.rmtree(os.path.join(BUILDS_DIR, config_name), ignore_errors=True) + shutil.rmtree(os.path.join(BINARIES_DIR, config_name), ignore_errors=True) + else: + if not config_name == "all-configs": + print("unknown unit test app config for action '%s'" % ut_clean_name) + + def ut_help(action, args): + HELP_STRING = """ +Additional unit-test-app specific targets + +idf.py ut-build-NAME - Build unit-test-app with configuration provided in configs/NAME. + Build directory will be builds/NAME/, output binaries will be + under output/NAME/ + +idf.py ut-clean-NAME - Remove build and output directories for configuration NAME. + +idf.py ut-build-all-configs - Build all configurations defined in configs/ directory. + +idf.py ut-apply-config-NAME - Generates configuration based on configs/NAME in sdkconfig + file. After this, normal all/flash targets can be used. + Useful for development/debugging. +""" + print(HELP_STRING) + + # Build dictionary of action extensions + extensions = dict() + + # This generates per-config targets (clean, build, apply-config). 
+ build_all_config_deps = [] + clean_all_config_deps = [] + + for config in CONFIG_NAMES: + config_build_action_name = "ut-build-" + config + config_clean_action_name = "ut-clean-" + config + config_apply_config_action_name = "ut-apply-config-" + config + + extensions[config_build_action_name] = (ut_build, [], []) + extensions[config_clean_action_name] = (ut_clean, [], []) + extensions[config_apply_config_action_name] = (ut_apply_config, [], []) + + build_all_config_deps.append(config_build_action_name) + clean_all_config_deps.append(config_clean_action_name) + + extensions["ut-build-all-configs"] = (ut_build, build_all_config_deps, []) + extensions["ut-clean-all-configs"] = (ut_clean, clean_all_config_deps, []) + + extensions["ut-help"] = (ut_help, [], []) + + base_actions.update(extensions) diff --git a/tools/unit-test-app/main/CMakeLists.txt b/tools/unit-test-app/main/CMakeLists.txt new file mode 100644 index 00000000..47f681d3 --- /dev/null +++ b/tools/unit-test-app/main/CMakeLists.txt @@ -0,0 +1,4 @@ +set(COMPONENT_SRCS "app_main.c") +set(COMPONENT_ADD_INCLUDEDIRS "") + +register_component() diff --git a/tools/unit-test-app/main/app_main.c b/tools/unit-test-app/main/app_main.c index dab076a8..73a8201f 100644 --- a/tools/unit-test-app/main/app_main.c +++ b/tools/unit-test-app/main/app_main.c @@ -1,11 +1,6 @@ #include -#include - -#include "esp_system.h" - #include "freertos/FreeRTOS.h" #include "freertos/task.h" - #include "unity.h" #include "unity_config.h" @@ -15,7 +10,10 @@ void unityTask(void *pvParameters) unity_run_menu(); /* Doesn't return */ } -void app_main(void) +void app_main() { - xTaskCreate(unityTask, "unityTask", 8192, NULL, UNITY_FREERTOS_PRIORITY, NULL); + // Note: if unpinning this task, change the way run times are calculated in + // unity_platform + xTaskCreatePinnedToCore(unityTask, "unityTask", UNITY_FREERTOS_STACK_SIZE, NULL, + UNITY_FREERTOS_PRIORITY, NULL, UNITY_FREERTOS_CPU); } diff --git a/tools/unit-test-app/partition_table_unit_test_app.csv b/tools/unit-test-app/partition_table_unit_test_app.csv new file mode 100644 index 00000000..432553f2 --- /dev/null +++ b/tools/unit-test-app/partition_table_unit_test_app.csv @@ -0,0 +1,17 @@ +# Special partition table for unit test app +# +# Name, Type, SubType, Offset, Size, Flags +# Note: if you change the phy_init or app partition offset, make sure to change the offset in Kconfig.projbuild +nvs, data, nvs, 0x9000, 0x4000 +otadata, data, ota, 0xd000, 0x2000 +phy_init, data, phy, 0xf000, 0x1000 +factory, 0, 0, 0x10000, 0x240000 +# these OTA partitions are used for tests, but can't fit real OTA apps in them +# (done this way to reduce total flash usage.) 
+ota_0, 0, ota_0, , 64K +ota_1, 0, ota_1, , 64K +# flash_test partition used for SPI flash tests, WL FAT tests, and SPIFFS tests +flash_test, data, fat, , 528K +nvs_key, data, nvs_keys, , 0x1000, encrypted + +# Note: still 1MB of a 4MB flash left free for some other purpose diff --git a/tools/unit-test-app/partition_table_unit_test_two_ota.csv b/tools/unit-test-app/partition_table_unit_test_two_ota.csv new file mode 100644 index 00000000..7c698c17 --- /dev/null +++ b/tools/unit-test-app/partition_table_unit_test_two_ota.csv @@ -0,0 +1,11 @@ +# Special partition table for unit test app_update +# Name, Type, SubType, Offset, Size, Flags +nvs, data, nvs, , 0x4000 +otadata, data, ota, , 0x2000 +phy_init, data, phy, , 0x1000 +factory, 0, 0, , 0xB0000 +ota_0, 0, ota_0, , 0xB0000 +ota_1, 0, ota_1, , 0xB0000 +test, 0, test, , 0xB0000 +# flash_test partition used for SPI flash tests, WL FAT tests, and SPIFFS tests +flash_test, data, fat, , 528K \ No newline at end of file diff --git a/tools/unit-test-app/sdkconfig.defaults b/tools/unit-test-app/sdkconfig.defaults new file mode 100644 index 00000000..badf01bb --- /dev/null +++ b/tools/unit-test-app/sdkconfig.defaults @@ -0,0 +1,30 @@ +CONFIG_LOG_BOOTLOADER_LEVEL_WARN=y +CONFIG_ESPTOOLPY_BAUD_921600B=y +CONFIG_ESPTOOLPY_FLASHSIZE_4MB=y +CONFIG_ESPTOOLPY_FLASHSIZE_DETECT=n +CONFIG_PARTITION_TABLE_CUSTOM=y +CONFIG_PARTITION_TABLE_CUSTOM_FILENAME="partition_table_unit_test_app.csv" +CONFIG_PARTITION_TABLE_FILENAME="partition_table_unit_test_app.csv" +CONFIG_PARTITION_TABLE_OFFSET=0x8000 +CONFIG_ESP32_DEFAULT_CPU_FREQ_240=y +CONFIG_ESP32_XTAL_FREQ_AUTO=y +CONFIG_FREERTOS_HZ=1000 +CONFIG_FREERTOS_WATCHPOINT_END_OF_STACK=y +CONFIG_FREERTOS_THREAD_LOCAL_STORAGE_POINTERS=3 +CONFIG_FREERTOS_USE_TRACE_FACILITY=y +CONFIG_HEAP_POISONING_COMPREHENSIVE=y +CONFIG_MBEDTLS_HARDWARE_MPI=y +CONFIG_MBEDTLS_MPI_USE_INTERRUPT=y +CONFIG_MBEDTLS_HARDWARE_SHA=y +CONFIG_SPI_FLASH_ENABLE_COUNTERS=y +CONFIG_ULP_COPROC_ENABLED=y +CONFIG_TASK_WDT=n +CONFIG_SPI_FLASH_WRITING_DANGEROUS_REGIONS_FAILS=y +CONFIG_FREERTOS_QUEUE_REGISTRY_SIZE=7 +CONFIG_STACK_CHECK_STRONG=y +CONFIG_STACK_CHECK=y +CONFIG_SUPPORT_STATIC_ALLOCATION=y +CONFIG_ESP_TIMER_PROFILING=y +CONFIG_ADC2_DISABLE_DAC=n +CONFIG_WARN_WRITE_STRINGS=y +CONFIG_SPI_MASTER_IN_IRAM=y \ No newline at end of file diff --git a/tools/unit-test-app/tools/ConfigDependency.yml b/tools/unit-test-app/tools/ConfigDependency.yml new file mode 100644 index 00000000..f7b265bc --- /dev/null +++ b/tools/unit-test-app/tools/ConfigDependency.yml @@ -0,0 +1,2 @@ +"psram": '{CONFIG_SPIRAM_SUPPORT=y} and not {CONFIG_SPIRAM_BANKSWITCH_ENABLE=y}' +"8Mpsram": "CONFIG_SPIRAM_BANKSWITCH_ENABLE=y" diff --git a/tools/unit-test-app/tools/CreateSectionTable.py b/tools/unit-test-app/tools/CreateSectionTable.py new file mode 100644 index 00000000..ca5114c6 --- /dev/null +++ b/tools/unit-test-app/tools/CreateSectionTable.py @@ -0,0 +1,163 @@ +# This file is used to process section data generated by `objdump -s` +import re + + +class Section(object): + """ + One Section of section table. 
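[Editorial aside, not part of the patch] To connect the `flash_test` row in the partition table above with the `get_test_data_partition()` helper added earlier in this patch, here is a small sketch of how a flash-oriented test typically obtains and erases that partition. The test name and erasing the whole partition are assumptions.

```c
/* Sketch: locating the 'flash_test' data partition declared in the CSV above
 * via get_test_data_partition(), then erasing it before a flash test runs. */
#include "unity.h"
#include "test_utils.h"
#include "esp_partition.h"

TEST_CASE("flash_test partition can be erased", "[spi_flash]")
{
    const esp_partition_t *part = get_test_data_partition();
    TEST_ASSERT_NOT_NULL(part);
    TEST_ASSERT_EQUAL(ESP_OK, esp_partition_erase_range(part, 0, part->size));
}
```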
contains info about section name, address and raw data + """ + SECTION_START_PATTERN = re.compile(b"Contents of section (.+?):") + DATA_PATTERN = re.compile(b"([0-9a-f]{4,8})") + + def __init__(self, name, start_address, data): + self.name = name + self.start_address = start_address + self.data = data + + def __contains__(self, item): + """ check if the section name and address match this section """ + if (item["section"] == self.name or item["section"] == "any") \ + and (self.start_address <= item["address"] < (self.start_address + len(self.data))): + return True + else: + return False + + def __getitem__(self, item): + """ + process slice. + convert absolute address to relative address in current section and return slice result + """ + if isinstance(item, int): + return self.data[item - self.start_address] + elif isinstance(item, slice): + start = item.start if item.start is None else item.start - self.start_address + stop = item.stop if item.stop is None else item.stop - self.start_address + return self.data[start:stop] + return self.data[item] + + def __str__(self): + return "%s [%08x - %08x]" % (self.name, self.start_address, self.start_address + len(self.data)) + + __repr__ = __str__ + + @classmethod + def parse_raw_data(cls, raw_data): + """ + process raw data generated by `objdump -s`, create section and return un-processed lines + :param raw_data: lines of raw data generated by `objdump -s` + :return: one section, un-processed lines + """ + name = "" + data = "" + start_address = 0 + # first find start line + for i, line in enumerate(raw_data): + if b"Contents of section " in line: # do strcmp first to speed up + match = cls.SECTION_START_PATTERN.search(line) + if match is not None: + name = match.group(1) + raw_data = raw_data[i + 1:] + break + else: + # do some error handling + raw_data = [b""] # add a dummy first data line + + def process_data_line(line_to_process): + # first remove the ascii part + hex_part = line_to_process.split(b" ")[0] + # process rest part + data_list = cls.DATA_PATTERN.findall(hex_part) + try: + _address = int(data_list[0], base=16) + except IndexError: + _address = -1 + + def hex_to_str(hex_data): + if len(hex_data) % 2 == 1: + hex_data = b"0" + hex_data # append zero at the beginning + _length = len(hex_data) + return "".join([chr(int(hex_data[_i:_i + 2], base=16)) + for _i in range(0, _length, 2)]) + + return _address, "".join([hex_to_str(x) for x in data_list[1:]]) + + # handle first line: + address, _data = process_data_line(raw_data[0]) + if address != -1: + start_address = address + data += _data + raw_data = raw_data[1:] + for i, line in enumerate(raw_data): + address, _data = process_data_line(line) + if address == -1: + raw_data = raw_data[i:] + break + else: + data += _data + else: + # do error handling + raw_data = [] + + section = cls(name, start_address, data) if start_address != -1 else None + unprocessed_data = None if len(raw_data) == 0 else raw_data + return section, unprocessed_data + + +class SectionTable(object): + """ elf section table """ + + def __init__(self, file_name): + with open(file_name, "rb") as f: + raw_data = f.readlines() + self.table = [] + while raw_data: + section, raw_data = Section.parse_raw_data(raw_data) + self.table.append(section) + + def get_unsigned_int(self, section, address, size=4, endian="LE"): + """ + get unsigned int from section table + :param section: section name; use "any" will only match with address + :param address: start address + :param size: size in bytes + :param endian: LE or BE + :return: 
int or None + """ + if address % 4 != 0 or size % 4 != 0: + print("warning: try to access without 4 bytes aligned") + key = {"address": address, "section": section} + for section in self.table: + if key in section: + tmp = section[address:address+size] + value = 0 + for i in range(size): + if endian == "LE": + value += ord(tmp[i]) << (i*8) + elif endian == "BE": + value += ord(tmp[i]) << ((size - i - 1) * 8) + else: + print("only support LE or BE for parameter endian") + assert False + break + else: + value = None + return value + + def get_string(self, section, address): + """ + get string ('\0' terminated) from section table + :param section: section name; use "any" will only match with address + :param address: start address + :return: string or None + """ + value = None + key = {"address": address, "section": section} + for section in self.table: + if key in section: + value = section[address:] + for i, c in enumerate(value): + if c == '\0': + value = value[:i] + break + break + return value diff --git a/tools/unit-test-app/tools/ModuleDefinition.yml b/tools/unit-test-app/tools/ModuleDefinition.yml new file mode 100644 index 00000000..0f9a31f9 --- /dev/null +++ b/tools/unit-test-app/tools/ModuleDefinition.yml @@ -0,0 +1,127 @@ +freertos: + module: System + module abbr: SYS + sub module: OS + sub module abbr: OS +nvs: + module: System + module abbr: SYS + sub module: NVS + sub module abbr: NVS +partition: + module: System + module abbr: SYS + sub module: Misc + sub module abbr: MISC +ulp: + module: System + module abbr: SYS + sub module: Misc + sub module abbr: MISC +fp: + module: System + module abbr: SYS + sub module: Misc + sub module abbr: MISC +hw: + module: System + module abbr: SYS + sub module: Misc + sub module abbr: MISC +tjpgd: + module: System + module abbr: SYS + sub module: Misc + sub module abbr: MISC +miniz: + module: System + module abbr: SYS + sub module: Misc + sub module abbr: MISC +mmap: + module: System + module abbr: SYS + sub module: Misc + sub module abbr: MISC +bignum: + module: System + module abbr: SYS + sub module: Misc + sub module abbr: MISC +newlib: + module: System + module abbr: SYS + sub module: Std Lib + sub module abbr: STD +aes: + module: System + module abbr: SYS + sub module: Misc + sub module abbr: MISC +mbedtls: + module: System + module abbr: SYS + sub module: Misc + sub module abbr: MISC +spi_flash: + module: Driver + module abbr: DRV + sub module: SPI + sub module abbr: SPI +spi_flash_read: + module: Driver + module abbr: DRV + sub module: SPI + sub module abbr: SPI +spi_flash_write: + module: Driver + module abbr: DRV + sub module: SPI + sub module abbr: SPI +esp32: + module: System + module abbr: SYS + sub module: Misc + sub module abbr: MISC +deepsleep: + module: RTC + module abbr: RTC + sub module: Deep Sleep + sub module abbr: SLEEP +sd: + module: System + module abbr: SYS + sub module: Misc + sub module abbr: MISC +cxx: + module: System + module abbr: SYS + sub module: Misc + sub module abbr: MISC +fatfs: + module: System + module abbr: SYS + sub module: Misc + sub module abbr: MISC +delay: + module: System + module abbr: SYS + sub module: Misc + sub module abbr: MISC +spi: + module: System + module abbr: SYS + sub module: Misc + sub module abbr: MISC +vfs: + module: System + module abbr: SYS + sub module: Misc + sub module abbr: MISC +misc: + module: System + module abbr: SYS + sub module: Misc + sub module abbr: MISC + + diff --git a/tools/unit-test-app/tools/TagDefinition.yml b/tools/unit-test-app/tools/TagDefinition.yml new file 
mode 100644 index 00000000..f84d9a70 --- /dev/null +++ b/tools/unit-test-app/tools/TagDefinition.yml @@ -0,0 +1,20 @@ +ignore: + # if the type exist but no value assigned + default: "Yes" + # if the type is not exist in tag list + omitted: "No" +test_env: + default: "UT_T1_1" + omitted: "UT_T1_1" +reset: + default: "POWERON_RESET" + omitted: " " +multi_device: + default: "Yes" + omitted: "No" +multi_stage: + default: "Yes" + omitted: "No" +timeout: + default: 30 + omitted: 30 diff --git a/tools/unit-test-app/tools/UnitTestParser.py b/tools/unit-test-app/tools/UnitTestParser.py new file mode 100644 index 00000000..b4c11c2a --- /dev/null +++ b/tools/unit-test-app/tools/UnitTestParser.py @@ -0,0 +1,318 @@ +from __future__ import print_function +import yaml +import os +import re +import shutil +import subprocess + +from copy import deepcopy +import CreateSectionTable + +TEST_CASE_PATTERN = { + "initial condition": "UTINIT1", + "SDK": "ESP32_IDF", + "level": "Unit", + "execution time": 0, + "auto test": "Yes", + "category": "Function", + "test point 1": "basic function", + "version": "v1 (2016-12-06)", + "test environment": "UT_T1_1", + "reset": "", + "expected result": "1. set succeed", + "cmd set": "test_unit_test_case", + "Test App": "UT", +} + + +class Parser(object): + """ parse unit test cases from build files and create files for test bench """ + + TAG_PATTERN = re.compile("([^=]+)(=)?(.+)?") + DESCRIPTION_PATTERN = re.compile("\[([^]\[]+)\]") + CONFIG_PATTERN = re.compile(r"{([^}]+)}") + + # file path (relative to idf path) + TAG_DEF_FILE = os.path.join("tools", "unit-test-app", "tools", "TagDefinition.yml") + MODULE_DEF_FILE = os.path.join("tools", "unit-test-app", "tools", "ModuleDefinition.yml") + CONFIG_DEPENDENCY_FILE = os.path.join("tools", "unit-test-app", "tools", "ConfigDependency.yml") + MODULE_ARTIFACT_FILE = os.path.join("components", "idf_test", "ModuleDefinition.yml") + TEST_CASE_FILE = os.path.join("components", "idf_test", "unit_test", "TestCaseAll.yml") + UT_BIN_FOLDER = os.path.join("tools", "unit-test-app", "output") + ELF_FILE = "unit-test-app.elf" + SDKCONFIG_FILE = "sdkconfig" + + def __init__(self, idf_path=os.getenv("IDF_PATH")): + self.test_env_tags = {} + self.unit_jobs = {} + self.file_name_cache = {} + self.idf_path = idf_path + self.tag_def = yaml.load(open(os.path.join(idf_path, self.TAG_DEF_FILE), "r")) + self.module_map = yaml.load(open(os.path.join(idf_path, self.MODULE_DEF_FILE), "r")) + self.config_dependencies = yaml.load(open(os.path.join(idf_path, self.CONFIG_DEPENDENCY_FILE), "r")) + # used to check if duplicated test case names + self.test_case_names = set() + self.parsing_errors = [] + + def parse_test_cases_for_one_config(self, config_output_folder, config_name): + """ + parse test cases from elf and save test cases need to be executed to unit test folder + :param config_output_folder: build folder of this config + :param config_name: built unit test config name + """ + elf_file = os.path.join(config_output_folder, self.ELF_FILE) + subprocess.check_output('xtensa-esp32-elf-objdump -t {} | grep test_desc > case_address.tmp'.format(elf_file), + shell=True) + subprocess.check_output('xtensa-esp32-elf-objdump -s {} > section_table.tmp'.format(elf_file), shell=True) + + table = CreateSectionTable.SectionTable("section_table.tmp") + tags = self.parse_tags(os.path.join(config_output_folder, self.SDKCONFIG_FILE)) + test_cases = [] + with open("case_address.tmp", "rb") as f: + for line in f: + # process symbol table like: "3ffb4310 l O .dram0.data 
00000018 test_desc_33$5010"
+                line = line.split()
+                test_addr = int(line[0], 16)
+                section = line[3]
+
+                name_addr = table.get_unsigned_int(section, test_addr, 4)
+                desc_addr = table.get_unsigned_int(section, test_addr + 4, 4)
+                file_name_addr = table.get_unsigned_int(section, test_addr + 12, 4)
+                function_count = table.get_unsigned_int(section, test_addr+20, 4)
+                name = table.get_string("any", name_addr)
+                desc = table.get_string("any", desc_addr)
+                file_name = table.get_string("any", file_name_addr)
+                tc = self.parse_one_test_case(name, desc, file_name, config_name, tags)
+
+                # check for duplicated case names
+                # we need to use the name to select the case,
+                # and with duplicated IDs Unity could select an incorrect case to run
+                # we need to check all cases no matter if they're going to be executed by CI
+                # also add app_name here, as we allow the same case for different apps
+                if (tc["summary"] + config_name) in self.test_case_names:
+                    self.parsing_errors.append("duplicated test case ID: " + tc["summary"])
+                else:
+                    self.test_case_names.add(tc["summary"] + config_name)
+
+                if tc["CI ready"] == "Yes":
+                    # update test env list and the cases of the same env list
+                    if tc["test environment"] in self.test_env_tags:
+                        self.test_env_tags[tc["test environment"]].append(tc["ID"])
+                    else:
+                        self.test_env_tags.update({tc["test environment"]: [tc["ID"]]})
+
+                    if function_count > 1:
+                        tc.update({"child case num": function_count})
+
+                    # only add cases that need to be executed
+                    test_cases.append(tc)
+
+        os.remove("section_table.tmp")
+        os.remove("case_address.tmp")
+
+        return test_cases
+
+    def parse_case_properities(self, tags_raw):
+        """
+        parse test case tags (properties) with the following rules:
+            * the first tag is always the group of the test cases, it's mandatory
+            * the rest of the tags should be [type=value].
+                * if the type has a default value, then [type] is equal to [type=default_value].
+                * if the type does not exist, then it is equal to [type=omitted_value]
+            default_value and omitted_value are defined in TagDefinition.yml
+        :param tags_raw: raw tag string
+        :return: tag dict
+        """
+        tags = self.DESCRIPTION_PATTERN.findall(tags_raw)
+        assert len(tags) > 0
+        p = dict([(k, self.tag_def[k]["omitted"]) for k in self.tag_def])
+        p["module"] = tags[0]
+
+        if p["module"] not in self.module_map:
+            p["module"] = "misc"
+
+        # parse the remaining tags: [type=value], where =value is optional
+        for tag in tags[1:]:
+            match = self.TAG_PATTERN.search(tag)
+            assert match is not None
+            tag_type = match.group(1)
+            tag_value = match.group(3)
+            if match.group(2) == "=" and tag_value is None:
+                # [tag_type=] means tag_value is empty string
+                tag_value = ""
+            if tag_type in p:
+                if tag_value is None:
+                    p[tag_type] = self.tag_def[tag_type]["default"]
+                else:
+                    p[tag_type] = tag_value
+            else:
+                # ignore tag types that are not defined
+                pass
+        return p
+
+    @staticmethod
+    def parse_tags_internal(sdkconfig, config_dependencies, config_pattern):
+        required_tags = []
+
+        def compare_config(config):
+            return config in sdkconfig
+
+        def process_condition(condition):
+            matches = config_pattern.findall(condition)
+            if matches:
+                for config in matches:
+                    compare_result = compare_config(config)
+                    # replace all configs in the condition with True or False according to the compare result
+                    condition = re.sub(config_pattern, str(compare_result), condition, count=1)
+                # now the condition is a Python expression, we can use eval to compute its value
+                ret = eval(condition)
+            else:
+                # no complex condition was used, only one condition is defined for the tag
+                ret = compare_config(condition)
+            return ret
+
+        for tag in config_dependencies:
+            if process_condition(config_dependencies[tag]):
+                required_tags.append(tag)
+
+        return required_tags
+
+    def parse_tags(self, sdkconfig_file):
+        """
+        Some test configs could require different DUTs.
+        For example, if CONFIG_SPIRAM_SUPPORT is enabled, we need a WROVER-Kit to run the test.
+        This method will get tags for runners according to ConfigDependency.yml (which maps tags to sdkconfig).
+
+        We support the following syntax::
+
+            # define the config which requires the tag
+            'tag_a': 'config_a="value_a"'
+            # define the condition for the tag
+            'tag_b': '{config A} and (not {config B} or (not {config C} and {config D}))'
+
+        :param sdkconfig_file: sdk config file of the unit test config
+        :return: required tags for runners
+        """
+
+        with open(sdkconfig_file, "r") as f:
+            configs_raw_data = f.read()
+
+        configs = configs_raw_data.splitlines(False)
+
+        return self.parse_tags_internal(configs, self.config_dependencies, self.CONFIG_PATTERN)
+
+    def parse_one_test_case(self, name, description, file_name, config_name, tags):
+        """
+        parse one test case
+        :param name: test case name (summary)
+        :param description: test case description (tag string)
+        :param file_name: the file that defines this test case
+        :param config_name: built unit test app name
+        :param tags: tags to select runners
+        :return: parsed test case
+        """
+        prop = self.parse_case_properities(description)
+
+        test_case = deepcopy(TEST_CASE_PATTERN)
+        test_case.update({"config": config_name,
+                          "module": self.module_map[prop["module"]]['module'],
+                          "CI ready": "No" if prop["ignore"] == "Yes" else "Yes",
+                          "ID": name,
+                          "test point 2": prop["module"],
+                          "steps": name,
+                          "test environment": prop["test_env"],
+                          "reset": prop["reset"],
+                          "sub module": self.module_map[prop["module"]]['sub module'],
+                          "summary": name,
+                          "multi_device": prop["multi_device"],
+                          "multi_stage": prop["multi_stage"],
+                          "timeout": int(prop["timeout"]),
+                          "tags": tags})
+        return test_case
+
+    def dump_test_cases(self, test_cases):
+        """
+        dump parsed test cases to a YAML file for test bench input
+        :param test_cases: parsed test cases
+        """
+        with open(os.path.join(self.idf_path, self.TEST_CASE_FILE), "w+") as f:
+            yaml.dump({"test cases": test_cases}, f, allow_unicode=True, default_flow_style=False)
+
+    def copy_module_def_file(self):
+        """ copy module def file to artifact path """
+        src = os.path.join(self.idf_path, self.MODULE_DEF_FILE)
+        dst = os.path.join(self.idf_path, self.MODULE_ARTIFACT_FILE)
+        shutil.copy(src, dst)
+
+    def parse_test_cases(self):
+        """ parse test cases from multiple built unit test apps """
+        test_cases = []
+
+        output_folder = os.path.join(self.idf_path, self.UT_BIN_FOLDER)
+        test_configs = os.listdir(output_folder)
+        for config in test_configs:
+            config_output_folder = os.path.join(output_folder, config)
+            if os.path.exists(config_output_folder):
+                test_cases.extend(self.parse_test_cases_for_one_config(config_output_folder, config))
+        test_cases.sort(key=lambda x: x["config"] + x["summary"])
+        self.dump_test_cases(test_cases)
+
+
+def test_parser():
+    parser = Parser()
+    # test parsing tags
+    # parsing module only and module in module list
+    prop = parser.parse_case_properities("[esp32]")
+    assert prop["module"] == "esp32"
+    # module not in module list
+    prop = parser.parse_case_properities("[not_in_list]")
+    assert prop["module"] == "misc"
+    # parsing a default tag, a tag with assigned value
+    prop = 
parser.parse_case_properities("[esp32][ignore][test_env=ABCD][not_support1][not_support2=ABCD]") + assert prop["ignore"] == "Yes" and prop["test_env"] == "ABCD" \ + and "not_support1" not in prop and "not_supported2" not in prop + # parsing omitted value + prop = parser.parse_case_properities("[esp32]") + assert prop["ignore"] == "No" and prop["test_env"] == "UT_T1_1" + # parsing with incorrect format + try: + parser.parse_case_properities("abcd") + assert False + except AssertionError: + pass + # skip invalid data parse, [type=] assigns empty string to type + prop = parser.parse_case_properities("[esp32]abdc aaaa [ignore=]") + assert prop["module"] == "esp32" and prop["ignore"] == "" + # skip mis-paired [] + prop = parser.parse_case_properities("[esp32][[ignore=b]][]][test_env=AAA]]") + assert prop["module"] == "esp32" and prop["ignore"] == "b" and prop["test_env"] == "AAA" + + config_dependency = { + 'a': '123', + 'b': '456', + 'c': 'not {123}', + 'd': '{123} and not {456}', + 'e': '{123} and not {789}', + 'f': '({123} and {456}) or ({123} and {789})' + } + sdkconfig = ["123", "789"] + tags = parser.parse_tags_internal(sdkconfig, config_dependency, parser.CONFIG_PATTERN) + assert sorted(tags) == ['a', 'd', 'f'] # sorted is required for older Python3, e.g. 3.4.8 + + +def main(): + test_parser() + + idf_path = os.getenv("IDF_PATH") + + parser = Parser(idf_path) + parser.parse_test_cases() + parser.copy_module_def_file() + if len(parser.parsing_errors) > 0: + for error in parser.parsing_errors: + print(error) + exit(-1) + + +if __name__ == '__main__': + main() diff --git a/tools/unit-test-app/unit_test.py b/tools/unit-test-app/unit_test.py new file mode 100755 index 00000000..eb300564 --- /dev/null +++ b/tools/unit-test-app/unit_test.py @@ -0,0 +1,748 @@ +#!/usr/bin/env python +# +# Copyright 2018 Espressif Systems (Shanghai) PTE LTD +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +Test script for unit test case. +""" + +import re +import os +import sys +import time +import argparse +import threading + +# if we want to run test case outside `tiny-test-fw` folder, +# we need to insert tiny-test-fw path into sys path +test_fw_path = os.getenv("TEST_FW_PATH") +if test_fw_path and test_fw_path not in sys.path: + sys.path.insert(0, test_fw_path) + +import TinyFW +import IDF +import Utility +import Env +from DUT import ExpectTimeout +from IDF.IDFApp import UT + + +UT_APP_BOOT_UP_DONE = "Press ENTER to see the list of tests." 
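+# The regular expressions below are matched against the raw console output of the
+# unit test app. Roughly speaking (illustrative, not exhaustive):
+#   - RESET_PATTERN / EXCEPTION_PATTERN / ABORT_PATTERN capture the reset or crash
+#     reason so it can later be compared against a test case's "[reset=...]" expectations
+#   - FINISH_PATTERN matches Unity's per-run summary, e.g. "1 Tests 0 Failures 0 Ignored"
+#   - TEST_PATTERN / TEST_SUBMENU_PATTERN match entries of the interactive test menu,
+#     e.g. '(1) "some test case" [module][timeout=30]' (the index and name here are hypothetical)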
+RESET_PATTERN = re.compile(r"(ets [\w]{3}\s+[\d]{1,2} [\d]{4} [\d]{2}:[\d]{2}:[\d]{2}[^()]*\([\w].*?\))") +EXCEPTION_PATTERN = re.compile(r"(Guru Meditation Error: Core\s+\d panic'ed \([\w].*?\))") +ABORT_PATTERN = re.compile(r"(abort\(\) was called at PC 0x[a-fA-F\d]{8} on core \d)") +FINISH_PATTERN = re.compile(r"1 Tests (\d) Failures (\d) Ignored") +END_LIST_STR = r'\r?\nEnter test for running' +TEST_PATTERN = re.compile(r'\((\d+)\)\s+"([^"]+)" ([^\r\n]+)\r?\n(' + END_LIST_STR + r')?') +TEST_SUBMENU_PATTERN = re.compile(r'\s+\((\d+)\)\s+"[^"]+"\r?\n(?=(?=\()|(' + END_LIST_STR + r'))') + +SIMPLE_TEST_ID = 0 +MULTI_STAGE_ID = 1 +MULTI_DEVICE_ID = 2 + +DEFAULT_TIMEOUT = 20 + +DUT_STARTUP_CHECK_RETRY_COUNT = 5 +TEST_HISTROY_CHECK_TIMEOUT = 1 + + +def format_test_case_config(test_case_data): + """ + convert the test case data to unified format. + We need to following info to run unit test cases: + + 1. unit test app config + 2. test case name + 3. test case reset info + + the formatted case config is a dict, with ut app config as keys. The value is a list of test cases. + Each test case is a dict with "name" and "reset" as keys. For example:: + + case_config = { + "default": [{"name": "restart from PRO CPU", "reset": "SW_CPU_RESET"}, {...}], + "psram": [{"name": "restart from PRO CPU", "reset": "SW_CPU_RESET"}], + } + + If config is not specified for test case, then + + :param test_case_data: string, list, or a dictionary list + :return: formatted data + """ + + case_config = dict() + + def parse_case(one_case_data): + """ parse and format one case """ + + def process_reset_list(reset_list): + # strip space and remove white space only items + _output = list() + for _r in reset_list: + _data = _r.strip(" ") + if _data: + _output.append(_data) + return _output + + _case = dict() + if isinstance(one_case_data, str): + _temp = one_case_data.split(" [reset=") + _case["name"] = _temp[0] + try: + _case["reset"] = process_reset_list(_temp[1][0:-1].split(",")) + except IndexError: + _case["reset"] = list() + elif isinstance(one_case_data, dict): + _case = one_case_data.copy() + assert "name" in _case + if "reset" not in _case: + _case["reset"] = list() + else: + if isinstance(_case["reset"], str): + _case["reset"] = process_reset_list(_case["reset"].split(",")) + else: + raise TypeError("Not supported type during parsing unit test case") + + if "config" not in _case: + _case["config"] = "default" + + return _case + + if not isinstance(test_case_data, list): + test_case_data = [test_case_data] + + for case_data in test_case_data: + parsed_case = parse_case(case_data) + try: + case_config[parsed_case["config"]].append(parsed_case) + except KeyError: + case_config[parsed_case["config"]] = [parsed_case] + + return case_config + + +def replace_app_bin(dut, name, new_app_bin): + if new_app_bin is None: + return + search_pattern = '/{}.bin'.format(name) + for i, config in enumerate(dut.download_config): + if config.endswith(search_pattern): + dut.download_config[i] = new_app_bin + Utility.console_log("The replaced application binary is {}".format(new_app_bin), "O") + break + + +def reset_dut(dut): + dut.reset() + # esptool ``run`` cmd takes quite long time. + # before reset finish, serial port is closed. therefore DUT could already bootup before serial port opened. + # this could cause checking bootup print failed. + # now use input cmd `-` and check test history to check if DUT is bootup. 
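+    # (right after boot-up the test history is empty, so the expected line is exactly
+    # "0 Tests 0 Failures 0 Ignored", which is what the expect() below waits for)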
+ # we'll retry this step for a few times, + # in case `dut.reset` returns during DUT bootup (when DUT can't process any command). + for _ in range(DUT_STARTUP_CHECK_RETRY_COUNT): + dut.write("-") + try: + dut.expect("0 Tests 0 Failures 0 Ignored", timeout=TEST_HISTROY_CHECK_TIMEOUT) + break + except ExpectTimeout: + pass + else: + raise AssertionError("Reset {} ({}) failed!".format(dut.name, dut.port)) + + +def run_one_normal_case(dut, one_case, junit_test_case, failed_cases): + + reset_dut(dut) + + dut.start_capture_raw_data() + # run test case + dut.write("\"{}\"".format(one_case["name"])) + dut.expect("Running " + one_case["name"] + "...") + + exception_reset_list = [] + + # we want to set this flag in callbacks (inner functions) + # use list here so we can use append to set this flag + test_finish = list() + + # expect callbacks + def one_case_finish(result): + """ one test finished, let expect loop break and log result """ + test_finish.append(True) + output = dut.stop_capture_raw_data() + if result: + Utility.console_log("Success: " + one_case["name"], color="green") + else: + failed_cases.append(one_case["name"]) + Utility.console_log("Failed: " + one_case["name"], color="red") + junit_test_case.add_failure_info(output) + + def handle_exception_reset(data): + """ + just append data to exception list. + exception list will be checked in ``handle_reset_finish``, once reset finished. + """ + exception_reset_list.append(data[0]) + + def handle_test_finish(data): + """ test finished without reset """ + # in this scenario reset should not happen + assert not exception_reset_list + if int(data[1]): + # case ignored + Utility.console_log("Ignored: " + one_case["name"], color="orange") + junit_test_case.add_skipped_info("ignored") + one_case_finish(not int(data[0])) + + def handle_reset_finish(data): + """ reset happened and reboot finished """ + assert exception_reset_list # reboot but no exception/reset logged. should never happen + result = False + if len(one_case["reset"]) == len(exception_reset_list): + for i, exception in enumerate(exception_reset_list): + if one_case["reset"][i] not in exception: + break + else: + result = True + if not result: + err_msg = "Reset Check Failed: \r\n\tExpected: {}\r\n\tGet: {}".format(one_case["reset"], + exception_reset_list) + Utility.console_log(err_msg, color="orange") + junit_test_case.add_error_info(err_msg) + one_case_finish(result) + + while not test_finish: + try: + dut.expect_any((RESET_PATTERN, handle_exception_reset), + (EXCEPTION_PATTERN, handle_exception_reset), + (ABORT_PATTERN, handle_exception_reset), + (FINISH_PATTERN, handle_test_finish), + (UT_APP_BOOT_UP_DONE, handle_reset_finish), + timeout=one_case["timeout"]) + except ExpectTimeout: + Utility.console_log("Timeout in expect", color="orange") + junit_test_case.add_error_info("timeout") + one_case_finish(False) + break + + +@IDF.idf_unit_test(env_tag="UT_T1_1", junit_report_by_case=True) +def run_unit_test_cases(env, extra_data): + """ + extra_data can be three types of value + 1. as string: + 1. "case_name" + 2. "case_name [reset=RESET_REASON]" + 2. as dict: + 1. with key like {"name": "Intr_alloc test, shared ints"} + 2. with key like {"name": "restart from PRO CPU", "reset": "SW_CPU_RESET", "config": "psram"} + 3. as list of string or dict: + [case1, case2, case3, {"name": "restart from PRO CPU", "reset": "SW_CPU_RESET"}, ...] 
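+
+    A combined (purely illustrative) extra_data value could therefore look like::
+
+        [
+            "UT of some case [reset=SW_CPU_RESET]",
+            {"name": "another case", "config": "psram"},
+        ]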
+ + :param env: test env instance + :param extra_data: the case name or case list or case dictionary + :return: None + """ + + case_config = format_test_case_config(extra_data) + + # we don't want stop on failed case (unless some special scenarios we can't handle) + # this flag is used to log if any of the case failed during executing + # Before exit test function this flag is used to log if the case fails + failed_cases = [] + + for ut_config in case_config: + Utility.console_log("Running unit test for config: " + ut_config, "O") + dut = env.get_dut("unit-test-app", app_path=ut_config) + if len(case_config[ut_config]) > 0: + replace_app_bin(dut, "unit-test-app", case_config[ut_config][0].get('app_bin')) + dut.start_app() + Utility.console_log("Download finished, start running test cases", "O") + + for one_case in case_config[ut_config]: + # create junit report test case + junit_test_case = TinyFW.JunitReport.create_test_case("[{}] {}".format(ut_config, one_case["name"])) + try: + run_one_normal_case(dut, one_case, junit_test_case, failed_cases) + TinyFW.JunitReport.test_case_finish(junit_test_case) + except Exception as e: + junit_test_case.add_error_info("Unexpected exception: " + str(e)) + TinyFW.JunitReport.test_case_finish(junit_test_case) + + # raise exception if any case fails + if failed_cases: + Utility.console_log("Failed Cases:", color="red") + for _case_name in failed_cases: + Utility.console_log("\t" + _case_name, color="red") + raise AssertionError("Unit Test Failed") + + +class Handler(threading.Thread): + + WAIT_SIGNAL_PATTERN = re.compile(r'Waiting for signal: \[(.+)\]!') + SEND_SIGNAL_PATTERN = re.compile(r'Send signal: \[(.+)\]!') + FINISH_PATTERN = re.compile(r"1 Tests (\d) Failures (\d) Ignored") + + def __init__(self, dut, sent_signal_list, lock, parent_case_name, child_case_index, timeout): + self.dut = dut + self.sent_signal_list = sent_signal_list + self.lock = lock + self.parent_case_name = parent_case_name + self.child_case_name = "" + self.child_case_index = child_case_index + 1 + self.finish = False + self.result = False + self.output = "" + self.fail_name = None + self.timeout = timeout + self.force_stop = threading.Event() # it show the running status + + reset_dut(self.dut) # reset the board to make it start from begining + + threading.Thread.__init__(self, name="{} Handler".format(dut)) + + def run(self): + + self.dut.start_capture_raw_data() + + def get_child_case_name(data): + self.child_case_name = data[0] + time.sleep(1) + self.dut.write(str(self.child_case_index)) + + def one_device_case_finish(result): + """ one test finished, let expect loop break and log result """ + self.finish = True + self.result = result + self.output = "[{}]\n\n{}\n".format(self.child_case_name, + self.dut.stop_capture_raw_data()) + if not result: + self.fail_name = self.child_case_name + + def device_wait_action(data): + start_time = time.time() + expected_signal = data[0] + while 1: + if time.time() > start_time + self.timeout: + Utility.console_log("Timeout in device for function: %s" % self.child_case_name, color="orange") + break + with self.lock: + if expected_signal in self.sent_signal_list: + self.dut.write(" ") + self.sent_signal_list.remove(expected_signal) + break + time.sleep(0.01) + + def device_send_action(data): + with self.lock: + self.sent_signal_list.append(data[0].encode('utf-8')) + + def handle_device_test_finish(data): + """ test finished without reset """ + # in this scenario reset should not happen + if int(data[1]): + # case ignored + 
Utility.console_log("Ignored: " + self.child_case_name, color="orange") + one_device_case_finish(not int(data[0])) + + try: + time.sleep(1) + self.dut.write("\"{}\"".format(self.parent_case_name)) + self.dut.expect("Running " + self.parent_case_name + "...") + except ExpectTimeout: + Utility.console_log("No case detected!", color="orange") + while not self.finish and not self.force_stop.isSet(): + try: + self.dut.expect_any((re.compile('\(' + str(self.child_case_index) + '\)\s"(\w+)"'), + get_child_case_name), + (self.WAIT_SIGNAL_PATTERN, device_wait_action), # wait signal pattern + (self.SEND_SIGNAL_PATTERN, device_send_action), # send signal pattern + (self.FINISH_PATTERN, handle_device_test_finish), # test finish pattern + timeout=self.timeout) + except ExpectTimeout: + Utility.console_log("Timeout in expect", color="orange") + one_device_case_finish(False) + break + + def stop(self): + self.force_stop.set() + + +def get_case_info(one_case): + parent_case = one_case["name"] + child_case_num = one_case["child case num"] + return parent_case, child_case_num + + +def get_dut(duts, env, name, ut_config, app_bin=None): + if name in duts: + dut = duts[name] + else: + dut = env.get_dut(name, app_path=ut_config) + duts[name] = dut + replace_app_bin(dut, "unit-test-app", app_bin) + dut.start_app() # download bin to board + return dut + + +def run_one_multiple_devices_case(duts, ut_config, env, one_case, failed_cases, app_bin, junit_test_case): + lock = threading.RLock() + threads = [] + send_signal_list = [] + result = True + parent_case, case_num = get_case_info(one_case) + + for i in range(case_num): + dut = get_dut(duts, env, "dut%d" % i, ut_config, app_bin) + threads.append(Handler(dut, send_signal_list, lock, + parent_case, i, one_case["timeout"])) + for thread in threads: + thread.setDaemon(True) + thread.start() + output = "Multiple Device Failed\n" + for thread in threads: + thread.join() + result = result and thread.result + output += thread.output + if not thread.result: + [thd.stop() for thd in threads] + + if result: + Utility.console_log("Success: " + one_case["name"], color="green") + else: + failed_cases.append(one_case["name"]) + junit_test_case.add_failure_info(output) + Utility.console_log("Failed: " + one_case["name"], color="red") + + +@IDF.idf_unit_test(env_tag="UT_T2_1", junit_report_by_case=True) +def run_multiple_devices_cases(env, extra_data): + """ + extra_data can be two types of value + 1. as dict: + e.g. + {"name": "gpio master/slave test example", + "child case num": 2, + "config": "release", + "env_tag": "UT_T2_1"} + 2. as list dict: + e.g. 
+ [{"name": "gpio master/slave test example1", + "child case num": 2, + "config": "release", + "env_tag": "UT_T2_1"}, + {"name": "gpio master/slave test example2", + "child case num": 2, + "config": "release", + "env_tag": "UT_T2_1"}] + + """ + failed_cases = [] + case_config = format_test_case_config(extra_data) + duts = {} + for ut_config in case_config: + Utility.console_log("Running unit test for config: " + ut_config, "O") + for one_case in case_config[ut_config]: + junit_test_case = TinyFW.JunitReport.create_test_case("[{}] {}".format(ut_config, one_case["name"])) + try: + run_one_multiple_devices_case(duts, ut_config, env, one_case, failed_cases, + one_case.get('app_bin'), junit_test_case) + TinyFW.JunitReport.test_case_finish(junit_test_case) + except Exception as e: + junit_test_case.add_error_info("Unexpected exception: " + str(e)) + TinyFW.JunitReport.test_case_finish(junit_test_case) + + if failed_cases: + Utility.console_log("Failed Cases:", color="red") + for _case_name in failed_cases: + Utility.console_log("\t" + _case_name, color="red") + raise AssertionError("Unit Test Failed") + + +def run_one_multiple_stage_case(dut, one_case, failed_cases, junit_test_case): + reset_dut(dut) + + dut.start_capture_raw_data() + + exception_reset_list = [] + + for test_stage in range(one_case["child case num"]): + # select multi stage test case name + dut.write("\"{}\"".format(one_case["name"])) + dut.expect("Running " + one_case["name"] + "...") + # select test function for current stage + dut.write(str(test_stage + 1)) + + # we want to set this flag in callbacks (inner functions) + # use list here so we can use append to set this flag + stage_finish = list() + + def last_stage(): + return test_stage == one_case["child case num"] - 1 + + def check_reset(): + if one_case["reset"]: + assert exception_reset_list # reboot but no exception/reset logged. should never happen + result = False + if len(one_case["reset"]) == len(exception_reset_list): + for i, exception in enumerate(exception_reset_list): + if one_case["reset"][i] not in exception: + break + else: + result = True + if not result: + err_msg = "Reset Check Failed: \r\n\tExpected: {}\r\n\tGet: {}".format(one_case["reset"], + exception_reset_list) + Utility.console_log(err_msg, color="orange") + junit_test_case.add_error_info(err_msg) + else: + # we allow omit reset in multi stage cases + result = True + return result + + # expect callbacks + def one_case_finish(result): + """ one test finished, let expect loop break and log result """ + # handle test finish + result = result and check_reset() + output = dut.stop_capture_raw_data() + if result: + Utility.console_log("Success: " + one_case["name"], color="green") + else: + failed_cases.append(one_case["name"]) + Utility.console_log("Failed: " + one_case["name"], color="red") + junit_test_case.add_failure_info(output) + stage_finish.append("break") + + def handle_exception_reset(data): + """ + just append data to exception list. + exception list will be checked in ``handle_reset_finish``, once reset finished. 
+ """ + exception_reset_list.append(data[0]) + + def handle_test_finish(data): + """ test finished without reset """ + # in this scenario reset should not happen + if int(data[1]): + # case ignored + Utility.console_log("Ignored: " + one_case["name"], color="orange") + junit_test_case.add_skipped_info("ignored") + # only passed in last stage will be regarded as real pass + if last_stage(): + one_case_finish(not int(data[0])) + else: + Utility.console_log("test finished before enter last stage", color="orange") + one_case_finish(False) + + def handle_next_stage(data): + """ reboot finished. we goto next stage """ + if last_stage(): + # already last stage, should never goto next stage + Utility.console_log("didn't finish at last stage", color="orange") + one_case_finish(False) + else: + stage_finish.append("continue") + + while not stage_finish: + try: + dut.expect_any((RESET_PATTERN, handle_exception_reset), + (EXCEPTION_PATTERN, handle_exception_reset), + (ABORT_PATTERN, handle_exception_reset), + (FINISH_PATTERN, handle_test_finish), + (UT_APP_BOOT_UP_DONE, handle_next_stage), + timeout=one_case["timeout"]) + except ExpectTimeout: + Utility.console_log("Timeout in expect", color="orange") + one_case_finish(False) + break + if stage_finish[0] == "break": + # test breaks on current stage + break + + +@IDF.idf_unit_test(env_tag="UT_T1_1", junit_report_by_case=True) +def run_multiple_stage_cases(env, extra_data): + """ + extra_data can be 2 types of value + 1. as dict: Mandantory keys: "name" and "child case num", optional keys: "reset" and others + 3. as list of string or dict: + [case1, case2, case3, {"name": "restart from PRO CPU", "child case num": 2}, ...] + + :param env: test env instance + :param extra_data: the case name or case list or case dictionary + :return: None + """ + + case_config = format_test_case_config(extra_data) + + # we don't want stop on failed case (unless some special scenarios we can't handle) + # this flag is used to log if any of the case failed during executing + # Before exit test function this flag is used to log if the case fails + failed_cases = [] + + for ut_config in case_config: + Utility.console_log("Running unit test for config: " + ut_config, "O") + dut = env.get_dut("unit-test-app", app_path=ut_config) + if len(case_config[ut_config]) > 0: + replace_app_bin(dut, "unit-test-app", case_config[ut_config][0].get('app_bin')) + dut.start_app() + + for one_case in case_config[ut_config]: + junit_test_case = TinyFW.JunitReport.create_test_case("[{}] {}".format(ut_config, one_case["name"])) + try: + run_one_multiple_stage_case(dut, one_case, failed_cases, junit_test_case) + TinyFW.JunitReport.test_case_finish(junit_test_case) + except Exception as e: + junit_test_case.add_error_info("Unexpected exception: " + str(e)) + TinyFW.JunitReport.test_case_finish(junit_test_case) + + # raise exception if any case fails + if failed_cases: + Utility.console_log("Failed Cases:", color="red") + for _case_name in failed_cases: + Utility.console_log("\t" + _case_name, color="red") + raise AssertionError("Unit Test Failed") + + +def detect_update_unit_test_info(env, extra_data, app_bin): + + case_config = format_test_case_config(extra_data) + + for ut_config in case_config: + dut = env.get_dut("unit-test-app", app_path=ut_config) + replace_app_bin(dut, "unit-test-app", app_bin) + dut.start_app() + + reset_dut(dut) + + # get the list of test cases + dut.write("") + dut.expect("Here's the test menu, pick your combo:", timeout=DEFAULT_TIMEOUT) + + def find_update_dic(name, _t, 
_timeout, child_case_num=None): + for _case_data in extra_data: + if _case_data['name'] == name: + _case_data['type'] = _t + if 'timeout' not in _case_data: + _case_data['timeout'] = _timeout + if child_case_num: + _case_data['child case num'] = child_case_num + + try: + while True: + data = dut.expect(TEST_PATTERN, timeout=DEFAULT_TIMEOUT) + test_case_name = data[1] + m = re.search(r'\[timeout=(\d+)\]', data[2]) + if m: + timeout = int(m.group(1)) + else: + timeout = 30 + m = re.search(r'\[multi_stage\]', data[2]) + if m: + test_case_type = MULTI_STAGE_ID + else: + m = re.search(r'\[multi_device\]', data[2]) + if m: + test_case_type = MULTI_DEVICE_ID + else: + test_case_type = SIMPLE_TEST_ID + find_update_dic(test_case_name, test_case_type, timeout) + if data[3] and re.search(END_LIST_STR, data[3]): + break + continue + # find the last submenu item + data = dut.expect(TEST_SUBMENU_PATTERN, timeout=DEFAULT_TIMEOUT) + find_update_dic(test_case_name, test_case_type, timeout, child_case_num=int(data[0])) + if data[1] and re.search(END_LIST_STR, data[1]): + break + # check if the unit test case names are correct, i.e. they could be found in the device + for _dic in extra_data: + if 'type' not in _dic: + raise ValueError("Unit test \"{}\" doesn't exist in the flashed device!".format(_dic.get('name'))) + except ExpectTimeout: + Utility.console_log("Timeout during getting the test list", color="red") + finally: + dut.close() + + # These options are the same for all configs, therefore there is no need to continue + break + + +if __name__ == '__main__': + parser = argparse.ArgumentParser() + parser.add_argument( + '--repeat', '-r', + help='Number of repetitions for the test(s). Default is 1.', + type=int, + default=1 + ) + parser.add_argument("--env_config_file", "-e", + help="test env config file", + default=None + ) + parser.add_argument("--app_bin", "-b", + help="application binary file for flashing the chip", + default=None + ) + parser.add_argument( + 'test', + help='Comma separated list of