@@ -0,0 +1,260 @@
+#####################
+# Default Variables #
+#####################
+stages:
+  - upload_cache
+  - pre_check
+  - build
+  - assign_test
+  - build_doc
+  - target_test
+  - host_test
+  - test_deploy
+  - deploy
+  - post_deploy
+
+variables:
+# System environment
+
+  # Common parameters for 'make' during CI tests
+  MAKEFLAGS: "-j5 --no-keep-going"
+
+# GitLab-CI environment
+
+  # XXX_ATTEMPTS variables (https://docs.gitlab.com/ce/ci/yaml/README.html#job-stages-attempts) are not defined here.
+  # Use values from "CI / CD Settings" - "Variables".
+
+  # GIT_STRATEGY is not defined here.
+  # Use an option from "CI / CD Settings" - "General pipelines".
+
+  # We download an archive for each submodule instead of cloning.
+  # Submodules are not fetched recursively, as the nested ones are not used in CI for now.
+  GIT_SUBMODULE_STRATEGY: none
+  SUBMODULE_FETCH_TOOL: "tools/ci/ci_fetch_submodule.py"
+  # By default all submodules are fetched.
+  # Jobs can override this variable to fetch only the submodules they require
+  # (a commented override sketch follows below); set it to "none" if no submodules are needed.
+  SUBMODULES_TO_FETCH: "all"
+  # Tell the build system not to check for submodule updates, since we download archives instead of cloning.
+  IDF_SKIP_CHECK_SUBMODULES: 1
+
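+  # Hypothetical override sketch, not a job defined in this file; the job name and submodule
+  # path are illustrative only:
+  #
+  #   example_build_job:
+  #     variables:
+  #       SUBMODULES_TO_FETCH: "components/mbedtls/mbedtls"
+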
+  IDF_PATH: "$CI_PROJECT_DIR"
+  BATCH_BUILD: "1"
+  V: "0"
+  CHECKOUT_REF_SCRIPT: "$CI_PROJECT_DIR/tools/ci/checkout_project_ref.py"
+  PYTHON_VER: 3.8.17
+
+  # Docker images
+  BOT_DOCKER_IMAGE_TAG: ":latest"
+
+  ESP_ENV_IMAGE: "$CI_DOCKER_REGISTRY/esp-env-v5.2:2"
+  ESP_IDF_DOC_ENV_IMAGE: "$CI_DOCKER_REGISTRY/esp-idf-doc-env-v5.2:2-1"
+  QEMU_IMAGE: "${CI_DOCKER_REGISTRY}/qemu-v5.2:2-20230522"
+  TARGET_TEST_ENV_IMAGE: "$CI_DOCKER_REGISTRY/target-test-env-v5.2:2"
+
+  SONARQUBE_SCANNER_IMAGE: "${CI_DOCKER_REGISTRY}/sonarqube-scanner:5"
+
+  PRE_COMMIT_IMAGE: "$CI_DOCKER_REGISTRY/esp-idf-pre-commit:1"
+
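+  # Hypothetical usage sketch, not a job defined in this file: jobs typically select one of the
+  # images above via the `image` keyword, e.g.
+  #
+  #   example_host_test_job:
+  #     image: $ESP_ENV_IMAGE
+  #     stage: host_test
+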
+  # Target test config file, used by the assign_test job
+  CI_TARGET_TEST_CONFIG_FILE: "$CI_PROJECT_DIR/.gitlab/ci/target-test.yml"
+
+  # Target test repo parameters
+  TEST_ENV_CONFIG_REPO: "https://gitlab-ci-token:${BOT_TOKEN}@${CI_SERVER_HOST}:${CI_SERVER_PORT}/qa/ci-test-runner-configs.git"
+
+  # Cache Python dependencies
+  PIP_CACHE_DIR: "$CI_PROJECT_DIR/.cache/pip"
+
+  # Set this variable to the branch of idf-constraints repo in order to test a custom Python constraint file. The
+  # branch name must be without the remote part ("origin/"). Keep the variable empty in order to use the constraint
+  # file from https://dl.espressif.com/dl/esp-idf.
+  CI_PYTHON_CONSTRAINT_BRANCH: ""
+
+  # Update the filename for a specific ESP-IDF release. It is used only with CI_PYTHON_CONSTRAINT_BRANCH.
+  CI_PYTHON_CONSTRAINT_FILE: "espidf.constraints.v5.2.txt"
+
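+  # Hypothetical example with an illustrative branch name: to try out a work-in-progress
+  # constraint file, a pipeline could be run with
+  #
+  #   CI_PYTHON_CONSTRAINT_BRANCH: "my_constraint_test_branch"
+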
+  # Set this variable to the repository name of a Python tool you wish to install and test in the context of ESP-IDF CI.
+  # Keep the variable empty when not used.
+  CI_PYTHON_TOOL_REPO: ""
+
+  # Set this variable to the branch of the Python tool repo specified in CI_PYTHON_TOOL_REPO. The
+  # branch name must be without the remote part ("origin/"). Keep the variable empty when not used.
+  # This is used only if CI_PYTHON_TOOL_REPO is not empty.
+  CI_PYTHON_TOOL_BRANCH: ""
+
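+  # Hypothetical example, repository and branch names are illustrative: to test an
+  # in-development Python tool against ESP-IDF CI, both variables would be set together, e.g.
+  #
+  #   CI_PYTHON_TOOL_REPO: "esptool"
+  #   CI_PYTHON_TOOL_BRANCH: "feature/my-change"
+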
+  IDF_CI_BUILD: 1
+
+################################################
+# `before_script` and `after_script` Templates #
+################################################
+.common_before_scripts: &common-before_scripts |
+  source tools/ci/utils.sh
+  is_based_on_commits $REQUIRED_ANCESTOR_COMMITS
+
+  if [[ -n "$IDF_DONT_USE_MIRRORS" ]]; then
+    export IDF_MIRROR_PREFIX_MAP=
+  fi
+
+  if echo "$CI_MERGE_REQUEST_LABELS" | egrep "(^|,)include_nightly_run(,|$)"; then
+    export INCLUDE_NIGHTLY_RUN="1"
+  fi
+
+  # configure CMake-related flags
+  source tools/ci/configure_ci_environment.sh
+
+  # add extra Python packages to PYTHONPATH
+  export PYTHONPATH="$IDF_PATH/tools:$IDF_PATH/tools/esp_app_trace:$IDF_PATH/components/partition_table:$IDF_PATH/tools/ci/python_packages:$PYTHONPATH"
+
+.setup_tools_and_idf_python_venv: &setup_tools_and_idf_python_venv |
+  # must be used after setup_tools_except_target_test,
+  # otherwise export.sh won't work properly
+
+  # download the constraint file for dev
+  if [[ -n "$CI_PYTHON_CONSTRAINT_BRANCH" ]]; then
+    wget -O /tmp/constraint.txt --header="Authorization:Bearer ${ESPCI_TOKEN}" ${GITLAB_HTTP_SERVER}/api/v4/projects/2581/repository/files/${CI_PYTHON_CONSTRAINT_FILE}/raw?ref=${CI_PYTHON_CONSTRAINT_BRANCH}
+    mkdir -p ~/.espressif
+    mv /tmp/constraint.txt ~/.espressif/${CI_PYTHON_CONSTRAINT_FILE}
+  fi
+
+  # Mirror
+  if [[ -n "$IDF_DONT_USE_MIRRORS" ]]; then
+    export IDF_MIRROR_PREFIX_MAP=
+  fi
+
+  # install latest python packages
+  # target test jobs
+  if [[ "${CI_JOB_STAGE}" == "target_test" ]]; then
+    # ttfw jobs
+    if ! echo "${CI_JOB_NAME}" | egrep ".*pytest.*"; then
+      run_cmd bash install.sh --enable-ci --enable-ttfw
+    else
+      run_cmd bash install.sh --enable-ci --enable-pytest
+    fi
+  elif [[ "${CI_JOB_STAGE}" == "build_doc" ]]; then
+    run_cmd bash install.sh --enable-ci --enable-docs
+  elif [[ "${CI_JOB_STAGE}" == "build" ]]; then
+    run_cmd bash install.sh --enable-ci --enable-pytest
+  else
+    if ! echo "${CI_JOB_NAME}" | egrep ".*pytest.*"; then
+      run_cmd bash install.sh --enable-ci
+    else
+      run_cmd bash install.sh --enable-ci --enable-pytest
+    fi
+  fi
+
+  # Install esp-clang if necessary
+  if [[ "$IDF_TOOLCHAIN" == "clang" ]]; then
+    $IDF_PATH/tools/idf_tools.py --non-interactive install esp-clang
+  fi
+
+  source ./export.sh
+
+  # Custom clang
+  if [[ ! -z "$CI_CLANG_DISTRO_URL" ]]; then
+    echo "Using custom clang from ${CI_CLANG_DISTRO_URL}"
+    wget $CI_CLANG_DISTRO_URL
+    ARCH_NAME=$(basename $CI_CLANG_DISTRO_URL)
+    tar -x -f $ARCH_NAME
+    export PATH=$PWD/esp-clang/bin:$PATH
+  fi
+
+  # Custom OpenOCD
+  if [[ ! -z "$OOCD_DISTRO_URL" && "$CI_JOB_STAGE" == "target_test" ]]; then
+    echo "Using custom OpenOCD from ${OOCD_DISTRO_URL}"
+    wget $OOCD_DISTRO_URL
+    ARCH_NAME=$(basename $OOCD_DISTRO_URL)
+    tar -x -f $ARCH_NAME
+    export OPENOCD_SCRIPTS=$PWD/openocd-esp32/share/openocd/scripts
+    export PATH=$PWD/openocd-esp32/bin:$PATH
+  fi
+
+  if [[ -n "$CI_PYTHON_TOOL_REPO" ]]; then
+    git clone --quiet --depth=1 -b ${CI_PYTHON_TOOL_BRANCH} https://gitlab-ci-token:${ESPCI_TOKEN}@${GITLAB_HTTPS_HOST}/espressif/${CI_PYTHON_TOOL_REPO}.git
+    pip install ./${CI_PYTHON_TOOL_REPO}
+    rm -rf ${CI_PYTHON_TOOL_REPO}
+  fi
+
+.show_ccache_statistics: &show_ccache_statistics |
+  # Show ccache statistics if enabled globally
+  test "$CI_CCACHE_STATS" == 1 && test -n "$(which ccache)" && ccache --show-stats || true
+
+.upload_built_binaries_to_s3: &upload_built_binaries_to_s3 |
+  # upload the binary files to the S3 server
+  echo -e "\e[0Ksection_start:`date +%s`:upload_binaries_to_s3_server[collapsed=true]\r\e[0KUploading binaries to S3 server"
+  shopt -s globstar
+  # use || true to bypass the error when no files match
+  zip ${CI_JOB_ID}.zip **/build*/*.bin || true
+  zip ${CI_JOB_ID}.zip **/build*/*.elf || true
+  zip ${CI_JOB_ID}.zip **/build*/*.map || true
+  zip ${CI_JOB_ID}.zip **/build*/flasher_args.json || true
+  zip ${CI_JOB_ID}.zip **/build*/flash_project_args || true
+  zip ${CI_JOB_ID}.zip **/build*/config/sdkconfig.json || true
+  zip ${CI_JOB_ID}.zip **/build*/sdkconfig || true
+  zip ${CI_JOB_ID}.zip **/build*/bootloader/*.bin || true
+  zip ${CI_JOB_ID}.zip **/build*/partition_table/*.bin || true
+  shopt -u globstar
+  mc cp ${CI_JOB_ID}.zip shiny-s3/idf-artifacts/${CI_PIPELINE_ID}/${CI_JOB_ID}.zip || true
+  echo -e "\e[0Ksection_end:`date +%s`:upload_binaries_to_s3_server\r\e[0K"
+  echo "Please download the full binary files (including *.elf and *.map files) from the following share link"
+  # the uploaded archive will be cleaned up after 4 days
+  mc share download shiny-s3/idf-artifacts/${CI_PIPELINE_ID}/${CI_JOB_ID}.zip --expire=96h
+
+.before_script:minimal:
+  before_script:
+    - *common-before_scripts
+
+.before_script:build:macos:
+  before_script:
+    - *common-before_scripts
+    # On macOS, these tools need to be installed
+    - export IDF_TOOLS_PATH="${HOME}/.espressif_runner_${CI_RUNNER_ID}_${CI_CONCURRENT_ID}"
+    - $IDF_PATH/tools/idf_tools.py --non-interactive install cmake ninja
+    # This adds tools (compilers) and the version-specific Python environment to PATH
+    - *setup_tools_and_idf_python_venv
+    - fetch_submodules
+
+.before_script:build:
+  before_script:
+    - *common-before_scripts
+    - *setup_tools_and_idf_python_venv
+    - add_gitlab_ssh_keys
+    - fetch_submodules
+    - export EXTRA_CFLAGS=${PEDANTIC_CFLAGS}
+    - export EXTRA_CXXFLAGS=${PEDANTIC_CXXFLAGS}
+
+.after_script:build:ccache:
+  after_script:
+    - *show_ccache_statistics
+
+.after_script:build:ccache-upload:
+  after_script:
+    - *show_ccache_statistics
+    - *upload_built_binaries_to_s3
+
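+# Hypothetical usage sketch, not a job defined in this file: build jobs are expected to pull
+# these templates in via `extends`, e.g.
+#
+#   example_build_job:
+#     extends:
+#       - .before_script:build
+#       - .after_script:build:ccache
+#     stage: build
+#     script:
+#       - cd examples/get-started/hello_world
+#       - idf.py build
+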
+#############
+# `default` #
+#############
+default:
+  cache:
+    # Pull-only for most use cases, since these are cache directories.
+    # Only "upload_cache" stage jobs should set the "push" policy (see the commented sketch below this list).
+    - key: pip-cache
+      paths:
+        - .cache/pip
+      policy: pull
+    - key: submodule-cache
+      paths:
+        - .cache/submodule_archives
+      policy: pull
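+  # Hypothetical sketch, job name is illustrative: an "upload_cache" stage job would override
+  # the cache policy to "push" so that the cache entries are actually uploaded, e.g.
+  #
+  #   example_upload_pip_cache_job:
+  #     stage: upload_cache
+  #     cache:
+  #       - key: pip-cache
+  #         paths:
+  #           - .cache/pip
+  #         policy: push
+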
+  before_script:
+    - *common-before_scripts
+    - *setup_tools_and_idf_python_venv
+    - add_gitlab_ssh_keys
+    - fetch_submodules
+  retry:
+    max: 2
+    when:
+      # In case of a runner failure we could hop to another one, or a network error could go away.
+      - runner_system_failure
+      # Job execution timeout may be caused by a network issue.
+      - job_execution_timeout