ci: apply idf-ci for artifacts
@@ -255,72 +255,4 @@ In ESP-IDF CI, there's a few more special rules are additionally supported to di

 ## Upload/Download Artifacts to Internal Minio Server

+Please refer to the documentation [here](https://docs.espressif.com/projects/idf-ci/en/latest/guides/cli.html)
+
-### Users Without Access to Minio
-
-If you don't have access to the internal Minio server, you can still download the artifacts from the shared link in the job log.
-
-The log will look like this:
-
-```shell
-Pipeline ID    : 587355
-Job name       : build_clang_test_apps_esp32
-Job ID         : 40272275
-Created archive file: 40272275.zip, uploading as 587355/build_dir_without_map_and_elf_files/build_clang_test_apps_esp32/40272275.zip
-Please download the archive file includes build_dir_without_map_and_elf_files from [INTERNAL_URL]
-```
-
-### Users With Access to Minio
-
-#### Env Vars for Minio
-
-Minio takes these env vars to connect to the server:
-
-- `IDF_S3_SERVER`
-- `IDF_S3_ACCESS_KEY`
-- `IDF_S3_SECRET_KEY`
-- `IDF_S3_BUCKET`
-
-#### Artifacts Types and File Patterns
-
-The artifacts types and corresponding file patterns are defined in tools/ci/artifacts_handler.py, inside `ArtifactType` and `TYPE_PATTERNS_DICT`.
-
-#### Upload
-
-```shell
-python tools/ci/artifacts_handler.py upload
-```
-
-will upload the files that match the file patterns to minio object storage with name:
-
-`<pipeline_id>/<artifact_type>/<job_name>/<job_id>.zip`
-
-For example, job 39043328 will upload these four files:
-
-- `575500/map_and_elf_files/build_pytest_examples_esp32/39043328.zip`
-- `575500/build_dir_without_map_and_elf_files/build_pytest_examples_esp32/39043328.zip`
-- `575500/logs/build_pytest_examples_esp32/39043328.zip`
-- `575500/size_reports/build_pytest_examples_esp32/39043328.zip`
-
-#### Download
-
-You may run
-
-```shell
-python tools/ci/artifacts_handler.py download --pipeline_id <pipeline_id>
-```
-
-to download all files of the pipeline, or
-
-```shell
-python tools/ci/artifacts_handler.py download --pipeline_id <pipeline_id> --job_name <job_name_or_pattern>
-```
-
-to download all files with the specified job name or pattern, or
-
-```shell
-python tools/ci/artifacts_handler.py download --pipeline_id <pipeline_id> --job_name <job_name_or_pattern> --type <artifact_type> <artifact_type> ...
-```
-
-to download all files with the specified job name or pattern and artifact type(s).
-
-You may check all detailed documentation with `python tools/ci/artifacts_handler.py download -h`
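Editorial note: the replacement documentation lives in the external idf-ci docs, but the command users now copy is visible in the report.py changes later in this commit. A minimal sketch of how that "Download Command" is rendered, with hypothetical values:

```python
# Sketch: what the new 'Download Command' column in the build report renders,
# per the f-string added in report.py later in this diff.
# The pipeline id and build path below are hypothetical illustration values.
pipeline_id = 587355
build_path = 'examples/get-started/hello_world/build_esp32_default'  # hypothetical
command = f'idf-ci gitlab download-artifacts --pipeline-id {pipeline_id} {build_path}'
print(command)
# idf-ci gitlab download-artifacts --pipeline-id 587355 examples/get-started/hello_world/build_esp32_default
```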
@@ -21,7 +21,7 @@
       - pipeline_variables
   artifacts:
     paths:
-      # The other artifacts patterns are defined under tools/ci/artifacts_handler.py
+      # The other artifacts patterns are defined under .idf_ci.toml
       # Now we're uploading/downloading the binary files from our internal storage server
       #
       # keep the log file to help debug
@@ -44,7 +44,6 @@
       --modified-components ${MR_MODIFIED_COMPONENTS}
       --modified-files ${MR_MODIFIED_FILES}
       $TEST_BUILD_OPTS_EXTRA
-    - python tools/ci/artifacts_handler.py upload

 ######################
 # build_template_app #
@@ -206,7 +205,7 @@ build_clang_test_apps_esp32p4:
   script:
     - ${IDF_PATH}/tools/ci/test_configure_ci_environment.sh
    - cd ${IDF_PATH}/tools/test_build_system
-    - python ${IDF_PATH}/tools/ci/get_known_failure_cases_file.py
+    - run_cmd idf-ci gitlab download-known-failure-cases-file ${KNOWN_FAILURE_CASES_FILE_NAME}
    - pytest
       --cleanup-idf-copy
       --parallel-count ${CI_NODE_TOTAL:-1}
@@ -120,7 +120,7 @@ variables:
     source tools/ci/configure_ci_environment.sh

     # add extra python packages
-    export PYTHONPATH="$IDF_PATH/tools:$IDF_PATH/tools/esp_app_trace:$IDF_PATH/components/partition_table:$IDF_PATH/tools/ci/python_packages:$PYTHONPATH"
+    export PYTHONPATH="$IDF_PATH/tools:$IDF_PATH/tools/ci:$IDF_PATH/tools/esp_app_trace:$IDF_PATH/components/partition_table:$IDF_PATH/tools/ci/python_packages:$PYTHONPATH"

   .setup_tools_and_idf_python_venv: &setup_tools_and_idf_python_venv |
     # must use after setup_tools_except_target_test
@@ -217,7 +217,7 @@ variables:

   .upload_failed_job_log_artifacts: &upload_failed_job_log_artifacts |
     if [ $CI_JOB_STATUS = "failed" ]; then
-      python tools/ci/artifacts_handler.py upload --type logs
+      run_cmd idf-ci gitlab upload-artifacts --type log
     fi

 .before_script:minimal:
@@ -303,7 +303,7 @@ test_pytest_qemu:
       --only-test-related
       -m qemu
       --modified-files ${MR_MODIFIED_FILES}
-    - python tools/ci/get_known_failure_cases_file.py
+    - run_cmd idf-ci gitlab download-known-failure-cases-file ${KNOWN_FAILURE_CASES_FILE_NAME}
    - run_cmd pytest
       --target $IDF_TARGET
       --log-cli-level DEBUG
@@ -331,7 +331,7 @@ test_pytest_linux:
       --target linux
       --only-test-related
       --modified-files ${MR_MODIFIED_FILES}
-    - python tools/ci/get_known_failure_cases_file.py
+    - run_cmd idf-ci gitlab download-known-failure-cases-file ${KNOWN_FAILURE_CASES_FILE_NAME}
    - run_cmd pytest
       --target linux
       --embedded-services idf
@@ -365,7 +365,7 @@ test_pytest_macos:
       --only-test-related
       -m macos
       --modified-files ${MR_MODIFIED_FILES}
-    - python tools/ci/get_known_failure_cases_file.py
+    - run_cmd idf-ci gitlab download-known-failure-cases-file ${KNOWN_FAILURE_CASES_FILE_NAME}
    - run_cmd pytest
       --target linux
       -m macos
@@ -144,9 +144,9 @@ pipeline_variables:
       fi
    - echo "OOCD_DISTRO_URL_ARMHF=$OOCD_DISTRO_URL_ARMHF" >> pipeline.env
    - echo "OOCD_DISTRO_URL_ARM64=$OOCD_DISTRO_URL_ARM64" >> pipeline.env
-    - idf-ci gitlab pipeline-variables >> pipeline.env
+    - run_cmd idf-ci gitlab pipeline-variables >> pipeline.env
    - cat pipeline.env
-    - python tools/ci/artifacts_handler.py upload --type modified_files_and_components_report
+    - run_cmd idf-ci gitlab upload-artifacts --type env
   artifacts:
     reports:
       dotenv: pipeline.env
.idf_ci.toml (57 changed lines)
@@ -9,6 +9,9 @@ IDF_CI_BUILD = "1"
 [gitlab]

 [gitlab.build_pipeline]
+workflow_name = "build_child_pipeline"
+presigned_json_job_name = 'generate_pytest_build_report'
+
 job_tags = ['build', 'shiny']
 job_template_name = '.dynamic_build_template'
 job_template_jinja = ''  # write in tools/ci/dynamic_pipelines/templates/.dynamic_jobs.yml
@@ -38,3 +41,57 @@ include:
 - tools/ci/dynamic_pipelines/templates/.dynamic_jobs.yml
 - tools/ci/dynamic_pipelines/templates/generate_target_test_report.yml
 """

+[gitlab.artifacts.s3.debug]
+bucket = "idf-artifacts"
+patterns = [
+    '**/build*/bootloader/*.map',
+    '**/build*/bootloader/*.elf',
+    '**/build*/*.map',
+    '**/build*/*.elf',
+    # customized
+    '**/build*/esp_tee/*.map',
+    '**/build*/esp_tee/*.elf',
+    '**/build*/gdbinit/*',
+]
+
+[gitlab.artifacts.s3.flash]
+bucket = "idf-artifacts"
+patterns = [
+    '**/build*/bootloader/*.bin',
+    '**/build*/*.bin',
+    '**/build*/partition_table/*.bin',
+    '**/build*/flasher_args.json',
+    '**/build*/flash_project_args',
+    '**/build*/config/sdkconfig.json',
+    '**/build*/sdkconfig',
+    '**/build*/project_description.json',
+    # customized
+    '**/build*/esp_tee/*.bin',
+]
+
+[gitlab.artifacts.s3.log]
+bucket = "idf-artifacts"
+patterns = [
+    '**/build*/build_log.txt',
+    '**/build*/size.json',
+]
+
+[gitlab.artifacts.s3.junit]
+bucket = "idf-artifacts"
+patterns = [
+    'XUNIT_RESULT_*.xml',
+]
+
+[gitlab.artifacts.s3.env]
+bucket = "idf-artifacts"
+patterns = [
+    'pipeline.env',
+]
+
+[gitlab.artifacts.s3.longterm]
+bucket = "longterm"
+if_clause = '"$CI_COMMIT_REF_NAME" == "master"'
+patterns = [
+    '**/build*/size.json',
+]
conftest.py (123 changed lines)
@@ -9,6 +9,7 @@
 # please report to https://github.com/espressif/pytest-embedded/issues
 # or discuss at https://github.com/espressif/pytest-embedded/discussions
 import os
+import subprocess
 import sys

 if os.path.join(os.path.dirname(__file__), 'tools', 'ci') not in sys.path:
@@ -17,29 +18,20 @@ if os.path.join(os.path.dirname(__file__), 'tools', 'ci') not in sys.path:
 if os.path.join(os.path.dirname(__file__), 'tools', 'ci', 'python_packages') not in sys.path:
     sys.path.append(os.path.join(os.path.dirname(__file__), 'tools', 'ci', 'python_packages'))

-import io
 import logging
 import os
 import re
 import typing as t
-import zipfile
 from copy import deepcopy
 from urllib.parse import quote

 import common_test_methods  # noqa: F401
 import gitlab_api
 import pytest
-import requests
-import yaml
 from _pytest.config import Config
 from _pytest.fixtures import FixtureRequest
-from artifacts_handler import ArtifactType
-from dynamic_pipelines.constants import TEST_RELATED_APPS_DOWNLOAD_URLS_FILENAME
 from idf_ci import PytestCase
 from idf_ci.idf_pytest import IDF_CI_PYTEST_CASE_KEY
-from idf_ci_local.uploader import AppDownloader
-from idf_ci_local.uploader import AppUploader
-from idf_ci_utils import IDF_PATH
 from idf_ci_utils import idf_relpath
 from idf_pytest.constants import DEFAULT_LOGDIR
 from idf_pytest.plugin import IDF_LOCAL_PLUGIN_KEY
@@ -96,69 +88,76 @@ def pipeline_id(request: FixtureRequest) -> t.Optional[str]:
     return request.config.getoption('pipeline_id', None) or os.getenv('PARENT_PIPELINE_ID', None)  # type: ignore


-class BuildReportDownloader(AppDownloader):
-    def __init__(self, presigned_url_yaml: str) -> None:
-        self.app_presigned_urls_dict: t.Dict[str, t.Dict[str, str]] = yaml.safe_load(presigned_url_yaml)
-
-    def _download_app(self, app_build_path: str, artifact_type: ArtifactType) -> None:
-        url = self.app_presigned_urls_dict[app_build_path][artifact_type.value]
-
-        logging.info('Downloading app from %s', url)
-        with io.BytesIO() as f:
-            for chunk in requests.get(url).iter_content(chunk_size=1024 * 1024):
-                if chunk:
-                    f.write(chunk)
-
-            f.seek(0)
-
-            with zipfile.ZipFile(f) as zip_ref:
-                zip_ref.extractall(IDF_PATH)
-
-    def download_app(self, app_build_path: str, artifact_type: t.Optional[ArtifactType] = None) -> None:
-        if app_build_path not in self.app_presigned_urls_dict:
-            raise ValueError(
-                f'No presigned url found for {app_build_path}. '
-                f'Usually this should not happen, please re-trigger a pipeline.'
-                f'If this happens again, please report this bug to the CI channel.'
-            )
-
-        super().download_app(app_build_path, artifact_type)
+def get_pipeline_commit_sha_by_pipeline_id(pipeline_id: str) -> t.Optional[str]:
+    gl = gitlab_api.Gitlab(os.getenv('CI_PROJECT_ID', 'espressif/esp-idf'))
+    pipeline = gl.project.pipelines.get(pipeline_id)
+    if not pipeline:
+        return None
+
+    commit = gl.project.commits.get(pipeline.sha)
+    if not commit or not commit.parent_ids:
+        return None
+
+    if len(commit.parent_ids) == 1:
+        return commit.parent_ids[0]  # type: ignore
+
+    for parent_id in commit.parent_ids:
+        parent_commit = gl.project.commits.get(parent_id)
+        if parent_commit.parent_ids and len(parent_commit.parent_ids) == 1:
+            return parent_id  # type: ignore
+
+    return None
+
+
+class AppDownloader:
+    def __init__(
+        self,
+        commit_sha: str,
+        pipeline_id: t.Optional[str] = None,
+    ) -> None:
+        self.commit_sha = commit_sha
+        self.pipeline_id = pipeline_id
+
+    def download_app(self, app_build_path: str, artifact_type: t.Optional[str] = None) -> None:
+        args = [
+            'idf-ci',
+            'gitlab',
+            'download-artifacts',
+            '--commit-sha',
+            self.commit_sha,
+        ]
+        if artifact_type:
+            args.extend(['--type', artifact_type])
+        if self.pipeline_id:
+            args.extend(['--pipeline-id', self.pipeline_id])
+        args.append(app_build_path)
+
+        subprocess.run(
+            args,
+            stdout=sys.stdout,
+            stderr=sys.stderr,
+        )
+
+
+PRESIGNED_JSON = 'presigned.json'


 @pytest.fixture(scope='session')
-def app_downloader(pipeline_id: t.Optional[str]) -> t.Optional[AppDownloader]:
+def app_downloader(
+    pipeline_id: t.Optional[str],
+) -> t.Optional[AppDownloader]:
     if not pipeline_id:
         return None

-    if (
-        'IDF_S3_BUCKET' in os.environ
-        and 'IDF_S3_ACCESS_KEY' in os.environ
-        and 'IDF_S3_SECRET_KEY' in os.environ
-        and 'IDF_S3_SERVER' in os.environ
-        and 'IDF_S3_BUCKET' in os.environ
-    ):
-        return AppUploader(pipeline_id)
-
-    logging.info('Downloading build report from the build pipeline %s', pipeline_id)
-    test_app_presigned_urls_file = None
-
-    gl = gitlab_api.Gitlab(os.getenv('CI_PROJECT_ID', 'espressif/esp-idf'))
-
-    for child_pipeline in gl.project.pipelines.get(pipeline_id, lazy=True).bridges.list(iterator=True):
-        if child_pipeline.name == 'build_child_pipeline':
-            for job in gl.project.pipelines.get(child_pipeline.downstream_pipeline['id'], lazy=True).jobs.list(
-                iterator=True
-            ):
-                if job.name == 'generate_pytest_build_report':
-                    test_app_presigned_urls_file = gl.download_artifact(
-                        job.id, [TEST_RELATED_APPS_DOWNLOAD_URLS_FILENAME]
-                    )[0]
-                    break
-
-    if test_app_presigned_urls_file:
-        return BuildReportDownloader(test_app_presigned_urls_file)
-
-    return None
+    commit_sha = get_pipeline_commit_sha_by_pipeline_id(pipeline_id)
+    if not commit_sha:
+        raise ValueError(
+            'commit sha cannot be found for pipeline id %s. Please check the pipeline id. '
+            'If you think this is a bug, please report it to CI team',
+        )
+    logging.debug('pipeline commit sha of pipeline %s is %s', pipeline_id, commit_sha)
+
+    return AppDownloader(commit_sha, pipeline_id)
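Two things worth noting in the hunk above: `get_pipeline_commit_sha_by_pipeline_id` walks merge-commit parents, presumably because merged-result pipelines run on an ephemeral merge commit while artifacts are keyed by a real branch commit (my reading of the intent; the diff does not state it), and `AppDownloader` is now a thin wrapper over the idf-ci CLI. A hypothetical local-usage sketch of the new class:

```python
# Hypothetical local usage of the AppDownloader added above; assumes idf-ci is
# installed and GitLab/S3 credentials are configured. All values illustrative.
downloader = AppDownloader(commit_sha='0123abcd', pipeline_id='587355')

# Fetch only the flash artifacts (bins, flasher_args.json, sdkconfig, ...)
# for one app's build directory, per the 'flash' pattern set in .idf_ci.toml.
downloader.download_app(
    'examples/get-started/hello_world/build_esp32_default',  # hypothetical path
    artifact_type='flash',
)
```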
@@ -189,7 +188,7 @@ def build_dir(
             if requires_elf_or_map(case):
                 app_downloader.download_app(app_build_path)
             else:
-                app_downloader.download_app(app_build_path, ArtifactType.BUILD_DIR_WITHOUT_MAP_AND_ELF_FILES)
+                app_downloader.download_app(app_build_path, 'flash')
             check_dirs = [f'build_{target}_{config}']
         else:
             check_dirs = []
@@ -390,8 +389,8 @@ def pytest_addoption(parser: pytest.Parser) -> None:
     )
     idf_group.addoption(
         '--pipeline-id',
-        help='main pipeline id, not the child pipeline id. Specify this option to download the artifacts '
-        'from the minio server for debugging purpose.',
+        help='For users without s3 access. main pipeline id, not the child pipeline id. '
+        'Specify this option to download the artifacts from the minio server for debugging purpose.',
     )
tools/ci/artifacts_handler.py (deleted)
@@ -1,214 +0,0 @@
-# SPDX-FileCopyrightText: 2023-2025 Espressif Systems (Shanghai) CO LTD
-# SPDX-License-Identifier: Apache-2.0
-import argparse
-import fnmatch
-import glob
-import os
-import typing as t
-import zipfile
-from enum import Enum
-from pathlib import Path
-from zipfile import ZipFile
-
-import urllib3
-from idf_ci_utils import sanitize_job_name
-from minio import Minio
-
-
-class ArtifactType(str, Enum):
-    MAP_AND_ELF_FILES = 'map_and_elf_files'
-    BUILD_DIR_WITHOUT_MAP_AND_ELF_FILES = 'build_dir_without_map_and_elf_files'
-
-    LOGS = 'logs'
-    SIZE_REPORTS = 'size_reports'
-    JUNIT_REPORTS = 'junit_reports'
-    MODIFIED_FILES_AND_COMPONENTS_REPORT = 'modified_files_and_components_report'
-
-
-TYPE_PATTERNS_DICT = {
-    ArtifactType.MAP_AND_ELF_FILES: [
-        '**/build*/bootloader/*.map',
-        '**/build*/bootloader/*.elf',
-        '**/build*/esp_tee/*.map',
-        '**/build*/esp_tee/*.elf',
-        '**/build*/*.map',
-        '**/build*/*.elf',
-    ],
-    ArtifactType.BUILD_DIR_WITHOUT_MAP_AND_ELF_FILES: [
-        '**/build*/build_log.txt',
-        '**/build*/*.bin',
-        '**/build*/bootloader/*.bin',
-        '**/build*/esp_tee/*.bin',
-        '**/build*/partition_table/*.bin',
-        '**/build*/flasher_args.json',
-        '**/build*/flash_project_args',
-        '**/build*/config/sdkconfig.json',
-        '**/build*/sdkconfig',
-        '**/build*/project_description.json',
-        'app_info_*.txt',
-    ],
-    ArtifactType.LOGS: [
-        '**/build*/build_log.txt',
-    ],
-    ArtifactType.SIZE_REPORTS: [
-        '**/build*/size.json',
-        'size_info.txt',
-    ],
-    ArtifactType.JUNIT_REPORTS: [
-        'XUNIT_RESULT*.xml',
-        'build_summary*.xml',
-    ],
-    ArtifactType.MODIFIED_FILES_AND_COMPONENTS_REPORT: [
-        'pipeline.env',
-    ],
-}
-
-
-def getenv(env_var: str) -> str:
-    try:
-        return os.environ[env_var]
-    except KeyError as e:
-        raise Exception(f'Environment variable {env_var} not set') from e
-
-
-def get_minio_client() -> Minio:
-    return Minio(
-        getenv('IDF_S3_SERVER').replace('https://', ''),
-        access_key=getenv('IDF_S3_ACCESS_KEY'),
-        secret_key=getenv('IDF_S3_SECRET_KEY'),
-        http_client=urllib3.PoolManager(
-            num_pools=10,
-            timeout=urllib3.Timeout.DEFAULT_TIMEOUT,
-            retries=urllib3.Retry(
-                total=5,
-                backoff_factor=0.2,
-                status_forcelist=[500, 502, 503, 504],
-            ),
-        ),
-    )
-
-
-def _download_files(
-    pipeline_id: int,
-    *,
-    artifact_type: t.Optional[ArtifactType] = None,
-    job_name: t.Optional[str] = None,
-    job_id: t.Optional[int] = None,
-) -> None:
-    if artifact_type:
-        prefix = f'{pipeline_id}/{artifact_type.value}/'
-    else:
-        prefix = f'{pipeline_id}/'
-
-    for obj in client.list_objects(getenv('IDF_S3_BUCKET'), prefix=prefix, recursive=True):
-        obj_name = obj.object_name
-        obj_p = Path(obj_name)
-        # <pipeline_id>/<action_type>/<job_name>/<job_id>.zip
-        if len(obj_p.parts) != 4:
-            print(f'Invalid object name: {obj_name}')
-            continue
-
-        if job_name:
-            # could be a pattern
-            if not fnmatch.fnmatch(obj_p.parts[2], job_name):
-                print(f'Job name {job_name} does not match {obj_p.parts[2]}')
-                continue
-
-        if job_id:
-            if obj_p.parts[3] != f'{job_id}.zip':
-                print(f'Job ID {job_id} does not match {obj_p.parts[3]}')
-                continue
-
-        client.fget_object(getenv('IDF_S3_BUCKET'), obj_name, obj_name)
-        print(f'Downloaded {obj_name}')
-
-        if obj_name.endswith('.zip'):
-            with ZipFile(obj_name, 'r') as zr:
-                zr.extractall()
-            print(f'Extracted {obj_name}')
-
-            os.remove(obj_name)
-
-
-def _upload_files(
-    pipeline_id: int,
-    *,
-    artifact_type: ArtifactType,
-    job_name: str,
-    job_id: str,
-) -> None:
-    has_file = False
-    with ZipFile(
-        f'{job_id}.zip',
-        'w',
-        compression=zipfile.ZIP_DEFLATED,
-        # 1 is the fastest compression level
-        # the size differs not much between 1 and 9
-        compresslevel=1,
-    ) as zw:
-        for pattern in TYPE_PATTERNS_DICT[artifact_type]:
-            for file in glob.glob(pattern, recursive=True):
-                zw.write(file)
-                has_file = True
-
-    try:
-        if has_file:
-            obj_name = f'{pipeline_id}/{artifact_type.value}/{sanitize_job_name(job_name)}/{job_id}.zip'
-            client.fput_object(getenv('IDF_S3_BUCKET'), obj_name, f'{job_id}.zip')
-            print(f'Created archive file: {job_id}.zip, uploaded as {obj_name}')
-    finally:
-        os.remove(f'{job_id}.zip')
-
-
-if __name__ == '__main__':
-    parser = argparse.ArgumentParser(
-        description='Download or upload files from/to S3, the object name would be '
-        '[PIPELINE_ID]/[ACTION_TYPE]/[JOB_NAME]/[JOB_ID].zip.'
-        '\n'
-        'For example: 123456/binaries/build_pytest_examples_esp32/123456789.zip',
-        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
-    )
-
-    common_args = argparse.ArgumentParser(add_help=False, formatter_class=argparse.ArgumentDefaultsHelpFormatter)
-    common_args.add_argument('--pipeline-id', type=int, help='Pipeline ID')
-    common_args.add_argument(
-        '--type', type=str, nargs='+', choices=[a.value for a in ArtifactType], help='Types of files to download'
-    )
-
-    action = parser.add_subparsers(dest='action', help='Download or Upload')
-    download = action.add_parser('download', help='Download files from S3', parents=[common_args])
-    upload = action.add_parser('upload', help='Upload files to S3', parents=[common_args])
-
-    download.add_argument('--job-name', type=str, help='Job name pattern')
-    download.add_argument('--job-id', type=int, help='Job ID')
-
-    upload.add_argument('--job-name', type=str, help='Job name')
-    upload.add_argument('--job-id', type=int, help='Job ID')
-
-    args = parser.parse_args()
-
-    client = get_minio_client()
-
-    ci_pipeline_id = args.pipeline_id or getenv('CI_PIPELINE_ID')  # required
-    if args.action == 'download':
-        method = _download_files
-        ci_job_name = args.job_name  # optional
-        ci_job_id = args.job_id  # optional
-    else:
-        method = _upload_files  # type: ignore
-        ci_job_name = args.job_name or getenv('CI_JOB_NAME')  # required
-        ci_job_id = args.job_id or getenv('CI_JOB_ID')  # required
-
-    if args.type:
-        types = [ArtifactType(t) for t in args.type]
-    else:
-        types = list(ArtifactType)
-
-    print(f'{"Pipeline ID":15}: {ci_pipeline_id}')
-    if ci_job_name:
-        print(f'{"Job name":15}: {ci_job_name}')
-    if ci_job_id:
-        print(f'{"Job ID":15}: {ci_job_id}')
-
-    for _t in types:
-        method(ci_pipeline_id, artifact_type=_t, job_name=ci_job_name, job_id=ci_job_id)  # type: ignore
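For anyone with scripts still calling the deleted handler, the replacements visible elsewhere in this commit are summarized below. The download form is partly an assumption — confirm against the idf-ci CLI documentation:

```python
# Old artifacts_handler.py invocations -> idf-ci equivalents, collected from
# the .gitlab-ci.yml changes in this commit. The download mapping is partly
# assumed; verify against `idf-ci gitlab --help`.
COMMAND_MIGRATION = {
    'python tools/ci/artifacts_handler.py upload --type logs': 'idf-ci gitlab upload-artifacts --type log',
    'python tools/ci/artifacts_handler.py upload --type logs junit_reports': 'idf-ci gitlab upload-artifacts --type junit',
    'python tools/ci/artifacts_handler.py upload --type modified_files_and_components_report': 'idf-ci gitlab upload-artifacts --type env',
    'python tools/ci/artifacts_handler.py download --pipeline_id <id>': 'idf-ci gitlab download-artifacts --pipeline-id <id>',
}
for old, new in COMMAND_MIGRATION.items():
    print(f'{old}\n  -> {new}')
```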
@@ -6,7 +6,6 @@ from idf_ci_utils import IDF_PATH

 COMMENT_START_MARKER = '### Dynamic Pipeline Report'

-TEST_RELATED_APPS_DOWNLOAD_URLS_FILENAME = 'test_related_apps_download_urls.yml'
 REPORT_TEMPLATE_FILEPATH = os.path.join(
     IDF_PATH, 'tools', 'ci', 'dynamic_pipelines', 'templates', 'report.template.html'
 )
@@ -9,14 +9,11 @@ import re
 import typing as t
 from textwrap import dedent

-import yaml
-from artifacts_handler import ArtifactType
 from gitlab import GitlabUpdateError
 from gitlab_api import Gitlab
-from idf_build_apps import App
 from idf_build_apps.constants import BuildStatus
 from idf_ci_local.app import AppWithMetricsInfo
-from idf_ci_local.uploader import AppUploader
+from idf_ci_utils import idf_relpath
 from prettytable import PrettyTable

 from .constants import BINARY_SIZE_METRIC_NAME
@@ -29,7 +26,6 @@ from .constants import RETRY_JOB_PICTURE_LINK
 from .constants import RETRY_JOB_PICTURE_PATH
 from .constants import RETRY_JOB_TITLE
 from .constants import SIZE_DIFFERENCE_BYTES_THRESHOLD
-from .constants import TEST_RELATED_APPS_DOWNLOAD_URLS_FILENAME
 from .constants import TOP_N_APPS_BY_SIZE_DIFF
 from .models import GitlabJob
 from .models import TestCase
@@ -45,7 +41,17 @@ from .utils import load_known_failure_cases
 class ReportGenerator:
     REGEX_PATTERN = r'#### {}\n[\s\S]*?(?=\n#### |$)'

-    def __init__(self, project_id: int, mr_iid: int, pipeline_id: int, job_id: int, commit_id: str, *, title: str):
+    def __init__(
+        self,
+        project_id: int,
+        mr_iid: int,
+        pipeline_id: int,
+        job_id: int,
+        commit_id: str,
+        local_commit_id: str,
+        *,
+        title: str,
+    ):
         gl_project = Gitlab(project_id).project
         if mr_iid is not None:
             self.mr = gl_project.mergerequests.get(mr_iid)
@@ -54,6 +60,7 @@ class ReportGenerator:
         self.pipeline_id = pipeline_id
         self.job_id = job_id
         self.commit_id = commit_id
+        self.local_commit_id = local_commit_id

         self.title = title
         self.output_filepath = self.title.lower().replace(' ', '_') + '.html'
@@ -61,11 +68,7 @@ class ReportGenerator:

     @property
     def get_commit_summary(self) -> str:
-        """Return a formatted commit summary string."""
-        return (
-            f'with CI commit SHA: {self.commit_id[:8]}, '
-            f'local commit SHA: {os.getenv("CI_MERGE_REQUEST_SOURCE_BRANCH_SHA", "")[:8]}'
-        )
+        return f'with CI commit SHA: {self.commit_id[:8]}, local commit SHA: {self.local_commit_id[:8]}'

     @staticmethod
     def get_download_link_for_url(url: str) -> str:
@@ -344,14 +347,13 @@ class BuildReportGenerator(ReportGenerator):
         pipeline_id: int,
         job_id: int,
         commit_id: str,
+        local_commit_id: str,
         *,
         title: str = 'Build Report',
-        apps: t.List[App],
-    ):
-        super().__init__(project_id, mr_iid, pipeline_id, job_id, commit_id, title=title)
+        apps: t.List[AppWithMetricsInfo],
+    ) -> None:
+        super().__init__(project_id, mr_iid, pipeline_id, job_id, commit_id, local_commit_id, title=title)
         self.apps = apps
-        self._uploader = AppUploader(self.pipeline_id)
-        self.apps_presigned_url_filepath = TEST_RELATED_APPS_DOWNLOAD_URLS_FILENAME
         self.report_titles_map = {
             'failed_apps': 'Failed Apps',
             'built_test_related_apps': 'Built Apps - Test Related',
@@ -363,7 +365,6 @@ class BuildReportGenerator(ReportGenerator):
         self.failed_apps_report_file = 'failed_apps.html'
         self.built_apps_report_file = 'built_apps.html'
         self.skipped_apps_report_file = 'skipped_apps.html'
-        self.app_presigned_urls_dict: t.Dict[str, t.Dict[str, str]] = {}

     @staticmethod
     def custom_sort(item: AppWithMetricsInfo) -> t.Tuple[int, t.Any]:
@@ -461,19 +462,13 @@ class BuildReportGenerator(ReportGenerator):
         sections = []

         if new_test_related_apps:
-            for app in new_test_related_apps:
-                for artifact_type in [ArtifactType.BUILD_DIR_WITHOUT_MAP_AND_ELF_FILES, ArtifactType.MAP_AND_ELF_FILES]:
-                    url = self._uploader.get_app_presigned_url(app, artifact_type)
-                    self.app_presigned_urls_dict.setdefault(app.build_path, {})[artifact_type.value] = url
-
             new_test_related_apps_table_section = self.create_table_section(
                 title=self.report_titles_map['new_test_related_apps'],
                 items=new_test_related_apps,
                 headers=[
                     'App Dir',
                     'Build Dir',
-                    'Bin Files with Build Log (without map and elf)',
-                    'Map and Elf Files',
+                    'Download Command',
                     'Your Branch App Size',
                 ],
                 row_attrs=[
@@ -481,31 +476,17 @@ class BuildReportGenerator(ReportGenerator):
                     'build_dir',
                 ],
                 value_functions=[
-                    ('Your Branch App Size', lambda app: str(app.metrics[BINARY_SIZE_METRIC_NAME].source_value)),
+                    ('Your Branch App Size', lambda _app: str(_app.metrics[BINARY_SIZE_METRIC_NAME].source_value)),
                     (
-                        'Bin Files with Build Log (without map and elf)',
-                        lambda app: self.get_download_link_for_url(
-                            self.app_presigned_urls_dict[app.build_path][
-                                ArtifactType.BUILD_DIR_WITHOUT_MAP_AND_ELF_FILES.value
-                            ]
-                        ),
-                    ),
-                    (
-                        'Map and Elf Files',
-                        lambda app: self.get_download_link_for_url(
-                            self.app_presigned_urls_dict[app.build_path][ArtifactType.MAP_AND_ELF_FILES.value]
-                        ),
+                        'Download Command',
+                        lambda _app: f'idf-ci gitlab download-artifacts --pipeline-id {self.pipeline_id} '
+                        f'{idf_relpath(_app.build_path)}',
                     ),
                 ],
             )
             sections.extend(new_test_related_apps_table_section)

         if built_test_related_apps:
-            for app in built_test_related_apps:
-                for artifact_type in [ArtifactType.BUILD_DIR_WITHOUT_MAP_AND_ELF_FILES, ArtifactType.MAP_AND_ELF_FILES]:
-                    url = self._uploader.get_app_presigned_url(app, artifact_type)
-                    self.app_presigned_urls_dict.setdefault(app.build_path, {})[artifact_type.value] = url
-
             built_test_related_apps = self._sort_items(
                 built_test_related_apps,
                 key='metrics.binary_size.difference_percentage',
@@ -519,8 +500,7 @@ class BuildReportGenerator(ReportGenerator):
                 headers=[
                     'App Dir',
                     'Build Dir',
-                    'Bin Files with Build Log (without map and elf)',
-                    'Map and Elf Files',
+                    'Download Command',
                     'Your Branch App Size',
                     'Target Branch App Size',
                     'Size Diff',
@@ -536,18 +516,9 @@ class BuildReportGenerator(ReportGenerator):
                     ('Size Diff', lambda app: str(app.metrics[BINARY_SIZE_METRIC_NAME].difference)),
                     ('Size Diff, %', lambda app: str(app.metrics[BINARY_SIZE_METRIC_NAME].difference_percentage)),
                     (
-                        'Bin Files with Build Log (without map and elf)',
-                        lambda app: self.get_download_link_for_url(
-                            self.app_presigned_urls_dict[app.build_path][
-                                ArtifactType.BUILD_DIR_WITHOUT_MAP_AND_ELF_FILES.value
-                            ]
-                        ),
-                    ),
-                    (
-                        'Map and Elf Files',
-                        lambda app: self.get_download_link_for_url(
-                            self.app_presigned_urls_dict[app.build_path][ArtifactType.MAP_AND_ELF_FILES.value]
-                        ),
+                        'Download Command',
+                        lambda _app: f'idf-ci gitlab download-artifacts --pipeline-id {self.pipeline_id} '
+                        f'{idf_relpath(_app.build_path)}',
                     ),
                 ],
             )
@@ -560,7 +531,7 @@ class BuildReportGenerator(ReportGenerator):
                 headers=[
                     'App Dir',
                     'Build Dir',
-                    'Build Log',
+                    'Download Command',
                     'Your Branch App Size',
                 ],
                 row_attrs=[
@@ -568,13 +539,12 @@ class BuildReportGenerator(ReportGenerator):
                     'build_dir',
                 ],
                 value_functions=[
-                    ('Your Branch App Size', lambda app: str(app.metrics[BINARY_SIZE_METRIC_NAME].source_value)),
                     (
-                        'Build Log',
-                        lambda app: self.get_download_link_for_url(
-                            self._uploader.get_app_presigned_url(app, ArtifactType.LOGS)
-                        ),
+                        'Download Command',
+                        lambda _app: f'idf-ci gitlab download-artifacts --pipeline-id {self.pipeline_id} '
+                        f'{idf_relpath(_app.build_path)}',
                     ),
+                    ('Your Branch App Size', lambda app: str(app.metrics[BINARY_SIZE_METRIC_NAME].source_value)),
                 ],
             )
             sections.extend(new_non_test_related_apps_table_section)
@@ -592,7 +562,7 @@ class BuildReportGenerator(ReportGenerator):
                 headers=[
                     'App Dir',
                     'Build Dir',
-                    'Build Log',
+                    'Download Command',
                     'Your Branch App Size',
                     'Target Branch App Size',
                     'Size Diff',
@@ -608,10 +578,9 @@ class BuildReportGenerator(ReportGenerator):
                     ('Size Diff', lambda app: str(app.metrics[BINARY_SIZE_METRIC_NAME].difference)),
                     ('Size Diff, %', lambda app: str(app.metrics[BINARY_SIZE_METRIC_NAME].difference_percentage)),
                     (
-                        'Build Log',
-                        lambda app: self.get_download_link_for_url(
-                            self._uploader.get_app_presigned_url(app, ArtifactType.LOGS)
-                        ),
+                        'Download Command',
+                        lambda _app: f'idf-ci gitlab download-artifacts --pipeline-id {self.pipeline_id} '
+                        f'{idf_relpath(_app.build_path)}',
                     ),
                 ],
             )
@@ -646,11 +615,6 @@ class BuildReportGenerator(ReportGenerator):

         self.additional_info += self._generate_top_n_apps_by_size_table()

-        # also generate a yaml file that includes the apps and the presigned urls
-        # for helping debugging locally
-        with open(self.apps_presigned_url_filepath, 'w') as fw:
-            yaml.dump(self.app_presigned_urls_dict, fw)
-
         return sections

     def get_failed_apps_report_parts(self) -> t.List[str]:
@@ -661,14 +625,13 @@ class BuildReportGenerator(ReportGenerator):
         failed_apps_table_section = self.create_table_section(
             title=self.report_titles_map['failed_apps'],
             items=failed_apps,
-            headers=['App Dir', 'Build Dir', 'Failed Reason', 'Build Log'],
+            headers=['App Dir', 'Build Dir', 'Failed Reason', 'Download Command'],
             row_attrs=['app_dir', 'build_dir', 'build_comment'],
             value_functions=[
                 (
-                    'Build Log',
-                    lambda app: self.get_download_link_for_url(
-                        self._uploader.get_app_presigned_url(app, ArtifactType.LOGS)
-                    ),
+                    'Download Command',
+                    lambda _app: f'idf-ci gitlab download-artifacts --pipeline-id {self.pipeline_id} '
+                    f'{idf_relpath(_app.build_path)}',
                 ),
             ],
         )
@@ -690,16 +653,8 @@ class BuildReportGenerator(ReportGenerator):
         skipped_apps_table_section = self.create_table_section(
             title=self.report_titles_map['skipped_apps'],
             items=skipped_apps,
-            headers=['App Dir', 'Build Dir', 'Skipped Reason', 'Build Log'],
+            headers=['App Dir', 'Build Dir', 'Skipped Reason'],
             row_attrs=['app_dir', 'build_dir', 'build_comment'],
-            value_functions=[
-                (
-                    'Build Log',
-                    lambda app: self.get_download_link_for_url(
-                        self._uploader.get_app_presigned_url(app, ArtifactType.LOGS)
-                    ),
-                ),
-            ],
         )
         skipped_apps_report_url = self.write_report_to_file(
             self.generate_html_report(''.join(skipped_apps_table_section)),
@@ -734,11 +689,12 @@ class TargetTestReportGenerator(ReportGenerator):
         pipeline_id: int,
         job_id: int,
         commit_id: str,
+        local_commit_id: str,
         *,
         title: str = 'Target Test Report',
         test_cases: t.List[TestCase],
-    ):
-        super().__init__(project_id, mr_iid, pipeline_id, job_id, commit_id, title=title)
+    ) -> None:
+        super().__init__(project_id, mr_iid, pipeline_id, job_id, commit_id, local_commit_id, title=title)

         self.test_cases = test_cases
         self._known_failure_cases_set = None
@@ -975,11 +931,12 @@ class JobReportGenerator(ReportGenerator):
         pipeline_id: int,
         job_id: int,
         commit_id: str,
+        local_commit_id: str,
         *,
         title: str = 'Job Report',
         jobs: t.List[GitlabJob],
     ):
-        super().__init__(project_id, mr_iid, pipeline_id, job_id, commit_id, title=title)
+        super().__init__(project_id, mr_iid, pipeline_id, job_id, commit_id, local_commit_id, title=title)
         self.jobs = jobs
         self.report_titles_map = {
             'failed_jobs': 'Failed Jobs (Excludes "integration_test" and "target_test" jobs)',
@@ -3,11 +3,12 @@
 import argparse
 import glob
 import os
+import subprocess
 import typing as t

 import __init__  # noqa: F401  # inject the system path
+from idf_build_apps import json_list_files_to_apps
 from idf_ci_local.app import enrich_apps_with_metrics_info
-from idf_ci_local.app import import_apps_from_txt

 from dynamic_pipelines.report import BuildReportGenerator
 from dynamic_pipelines.report import JobReportGenerator
@@ -60,12 +61,13 @@ def common_arguments(parser: argparse.ArgumentParser) -> None:
     parser.add_argument('--mr-iid', type=int, default=os.getenv('CI_MERGE_REQUEST_IID'), help='Merge Request IID')
     parser.add_argument('--pipeline-id', type=int, default=os.getenv('PARENT_PIPELINE_ID'), help='Pipeline ID')
     parser.add_argument('--job-id', type=int, default=os.getenv('CI_JOB_ID'), help='Job ID')
-    parser.add_argument('--commit-id', default=os.getenv('CI_COMMIT_SHA'), help='MR commit ID')
+    parser.add_argument('--commit-id', default=os.getenv('CI_COMMIT_SHA', ''), help='MR merged result commit ID')
+    parser.add_argument('--local-commit-id', default=os.getenv('PIPELINE_COMMIT_SHA', ''), help='local dev commit ID')


 def conditional_arguments(report_type_args: argparse.Namespace, parser: argparse.ArgumentParser) -> None:
     if report_type_args.report_type == 'build':
-        parser.add_argument('--app-list-filepattern', default='app_info_*.txt', help='Pattern to match app list files')
+        parser.add_argument('--app-list-filepattern', default='app_info*.txt', help='Pattern to match app list files')
     elif report_type_args.report_type == 'target_test':
         parser.add_argument(
             '--junit-report-filepattern', default='XUNIT_RESULT*.xml', help='Pattern to match JUnit report files'
@@ -73,16 +75,30 @@ def conditional_arguments(report_type_args: argparse.Namespace, parser: argparse


 def generate_build_report(args: argparse.Namespace) -> None:
-    apps: t.List[t.Any] = [
-        app for file_name in glob.glob(args.app_list_filepattern) for app in import_apps_from_txt(file_name)
-    ]
+    # generate presigned url for the artifacts
+    subprocess.check_output(
+        [
+            'idf-ci',
+            'gitlab',
+            'generate-presigned-json',
+            '--commit-sha',
+            args.local_commit_id,
+            '--output',
+            'presigned.json',
+        ],
+    )
+    print('generated presigned.json')
+
+    # generate report
+    apps = json_list_files_to_apps(glob.glob(args.app_list_filepattern))
+    print(f'loaded {len(apps)} apps')
     app_metrics = fetch_app_metrics(
-        source_commit_sha=os.environ.get('CI_COMMIT_SHA'),
+        source_commit_sha=args.commit_id,
         target_commit_sha=os.environ.get('CI_MERGE_REQUEST_TARGET_BRANCH_SHA'),
     )
     apps = enrich_apps_with_metrics_info(app_metrics, apps)
     report_generator = BuildReportGenerator(
-        args.project_id, args.mr_iid, args.pipeline_id, args.job_id, args.commit_id, apps=apps
+        args.project_id, args.mr_iid, args.pipeline_id, args.job_id, args.commit_id, args.local_commit_id, apps=apps
     )
     report_generator.post_report()
@@ -90,7 +106,13 @@ def generate_build_report(args: argparse.Namespace) -> None:
 def generate_target_test_report(args: argparse.Namespace) -> None:
     test_cases: t.List[t.Any] = parse_testcases_from_filepattern(args.junit_report_filepattern)
     report_generator = TargetTestReportGenerator(
-        args.project_id, args.mr_iid, args.pipeline_id, args.job_id, args.commit_id, test_cases=test_cases
+        args.project_id,
+        args.mr_iid,
+        args.pipeline_id,
+        args.job_id,
+        args.commit_id,
+        args.local_commit_id,
+        test_cases=test_cases,
     )
     report_generator.post_report()

@@ -102,7 +124,7 @@ def generate_jobs_report(args: argparse.Namespace) -> None:
         return

     report_generator = JobReportGenerator(
-        args.project_id, args.mr_iid, args.pipeline_id, args.job_id, args.commit_id, jobs=jobs
+        args.project_id, args.mr_iid, args.pipeline_id, args.job_id, args.commit_id, args.local_commit_id, jobs=jobs
     )
     report_generator.post_report(print_retry_jobs_message=any(job.is_failed for job in jobs))
@@ -25,7 +25,7 @@
       job: pipeline_variables
   artifacts:
     paths:
-      # The other artifacts patterns are defined under tools/ci/artifacts_handler.py
+      # The other artifacts patterns are defined under .idf_ci.toml
       # Now we're uploading/downloading the binary files from our internal storage server

       # keep the log file to help debug
@@ -42,7 +42,6 @@
       --parallel-count ${CI_NODE_TOTAL:-1}
       --parallel-index ${CI_NODE_INDEX:-1}
       --modified-files ${MR_MODIFIED_FILES}
-    - run_cmd python tools/ci/artifacts_handler.py upload --type size_reports

 .dynamic_target_test_template:
   extends:
@@ -50,9 +49,6 @@
   image: $TARGET_TEST_ENV_IMAGE
   stage: target_test
   timeout: 1 hour
-  needs:
-    - pipeline: $PARENT_PIPELINE_ID
-      job: pipeline_variables
   variables:
     SUBMODULES_TO_FETCH: "none"
     # set while generating the pipeline
@@ -79,13 +75,12 @@
       when: always
       expire_in: 1 week
   script:
-    # get known failure cases
-    - run_cmd python tools/ci/get_known_failure_cases_file.py
+    - run_cmd idf-ci gitlab download-known-failure-cases-file ${KNOWN_FAILURE_CASES_FILE_NAME}
     # get runner env config file
    - retry_failed git clone $TEST_ENV_CONFIG_REPO
    - run_cmd python $CHECKOUT_REF_SCRIPT ci-test-runner-configs ci-test-runner-configs
     # CI specific options start from "--known-failure-cases-file xxx". could ignore when running locally
-    - run_cmd pytest ${nodes}
+    - run_cmd pytest $nodes
       --pipeline-id $PARENT_PIPELINE_ID
       --junitxml=XUNIT_RESULT_${CI_JOB_NAME_SLUG}.xml
       --ignore-result-files ${KNOWN_FAILURE_CASES_FILE_NAME}
@@ -94,9 +89,7 @@
       ${PYTEST_EXTRA_FLAGS}
   after_script:
    - source tools/ci/utils.sh
-    - section_start "upload_junit_reports"
-    - run_cmd python tools/ci/artifacts_handler.py upload --type logs junit_reports
-    - section_end "upload_junit_reports"
+    - run_cmd idf-ci gitlab upload-artifacts --type junit

 .timeout_4h_template:
   timeout: 4 hours
@@ -1,8 +1,29 @@
+all_test_finished:
+  stage: .post
+  tags: [fast_run, shiny]
+  image: $ESP_ENV_IMAGE
+  when: always
+  # this job is used to check if all target test jobs are finished
+  # because the `needs` make the later jobs run even if the previous stage are not finished
+  # and there's no `needs: stage` for now in gitlab
+  # https://gitlab.com/gitlab-org/gitlab/-/issues/220758
+  artifacts:
+    untracked: true
+    expire_in: 1 week
+    when: always
+  before_script: []
+  script:
+    - echo "all test jobs finished"
+
 generate_pytest_report:
   stage: .post
   tags: [build, shiny]
   image: $ESP_ENV_IMAGE
   when: always
+  needs:
+    - all_test_finished
+    - pipeline: $PARENT_PIPELINE_ID
+      job: pipeline_variables
   artifacts:
     paths:
       - target_test_report.html
@@ -12,6 +33,6 @@ generate_pytest_report:
     expire_in: 2 week
     when: always
   script:
-    - python tools/ci/get_known_failure_cases_file.py
+    - run_cmd idf-ci gitlab download-known-failure-cases-file ${KNOWN_FAILURE_CASES_FILE_NAME}
    - python tools/ci/dynamic_pipelines/scripts/generate_report.py --report-type target_test
    - python tools/ci/previous_stage_job_status.py --stage target_test
@@ -1,3 +1,20 @@
+all_build_finished:
+  stage: assign_test
+  tags: [fast_run, shiny]
+  image: $ESP_ENV_IMAGE
+  when: always
+  # this job is used to check if all build jobs are finished
+  # because the `needs` make the later jobs run even if the previous stage are not finished
+  # and there's no `needs: stage` for now in gitlab
+  # https://gitlab.com/gitlab-org/gitlab/-/issues/220758
+  artifacts:
+    untracked: true
+    expire_in: 1 week
+    when: always
+  before_script: []
+  script:
+    - echo "all test jobs finished"
+
 generate_pytest_build_report:
   stage: assign_test
   image: $ESP_ENV_IMAGE
@@ -6,20 +23,19 @@ generate_pytest_build_report:
     - shiny
   when: always
   needs:
+    - all_build_finished
     - pipeline: $PARENT_PIPELINE_ID
       job: pipeline_variables
-    - build_apps
   artifacts:
     paths:
       - failed_apps.html
       - built_apps.html
       - skipped_apps.html
       - build_report.html
-      - test_related_apps_download_urls.yml
-    expire_in: 2 week
+      - presigned.json
+    expire_in: 1 week
     when: always
   script:
-    - env
     - python tools/ci/dynamic_pipelines/scripts/generate_report.py --report-type build
     - python tools/ci/previous_stage_job_status.py --stage build
 
@@ -31,9 +47,9 @@ generate_pytest_child_pipeline:
     - build
     - shiny
   needs:
+    - build_test_related_apps # won't work if the parallel count exceeds 100, now it's around 50
    - pipeline: $PARENT_PIPELINE_ID
       job: pipeline_variables
-    - build_apps
   artifacts:
     paths:
       - target_test_child_pipeline.yml

@@ -8,12 +8,13 @@ import unittest
 from unittest.mock import MagicMock
 from unittest.mock import patch
 
+from idf_build_apps import json_list_files_to_apps
+
 sys.path.insert(0, os.path.join(f'{os.environ.get("IDF_PATH")}', 'tools', 'ci', 'python_packages'))
 sys.path.insert(0, os.path.join(f'{os.environ.get("IDF_PATH")}', 'tools', 'ci'))
 
 from idf_build_apps.constants import BuildStatus  # noqa: E402
 from idf_ci_local.app import enrich_apps_with_metrics_info  # noqa: E402
-from idf_ci_local.app import import_apps_from_txt  # noqa: E402
 
 from dynamic_pipelines.models import GitlabJob  # noqa: E402
 from dynamic_pipelines.report import BuildReportGenerator  # noqa: E402
@@ -40,7 +41,6 @@ class TestReportGeneration(unittest.TestCase):
 
     def setup_patches(self) -> None:
         self.gitlab_patcher = patch('dynamic_pipelines.report.Gitlab')
-        self.uploader_patcher = patch('dynamic_pipelines.report.AppUploader')
         self.failure_rate_patcher = patch('dynamic_pipelines.report.fetch_failed_testcases_failure_ratio')
         self.env_patcher = patch.dict(
             'os.environ',
@@ -54,7 +54,6 @@ class TestReportGeneration(unittest.TestCase):
         self.yaml_dump_patcher = patch('dynamic_pipelines.report.yaml.dump')
 
         self.MockGitlab = self.gitlab_patcher.start()
-        self.MockUploader = self.uploader_patcher.start()
         self.test_cases_failure_rate = self.failure_rate_patcher.start()
         self.env_patcher.start()
         self.yaml_dump_patcher.start()
@@ -63,10 +62,8 @@ class TestReportGeneration(unittest.TestCase):
         self.mock_mr = MagicMock()
         self.MockGitlab.return_value.project = self.mock_project
         self.mock_project.mergerequests.get.return_value = self.mock_mr
-        self.MockUploader.return_value.get_app_presigned_url.return_value = 'https://example.com/presigned-url'
 
         self.addCleanup(self.gitlab_patcher.stop)
-        self.addCleanup(self.uploader_patcher.stop)
         self.addCleanup(self.failure_rate_patcher.stop)
         self.addCleanup(self.env_patcher.stop)
         self.addCleanup(self.yaml_dump_patcher.stop)
@@ -80,7 +77,6 @@ class TestReportGeneration(unittest.TestCase):
             self.build_report_generator.failed_apps_report_file,
             self.build_report_generator.built_apps_report_file,
             self.build_report_generator.skipped_apps_report_file,
-            self.build_report_generator.apps_presigned_url_filepath,
         ]
         for file_path in files_to_delete:
             if os.path.exists(file_path):
@@ -112,7 +108,8 @@ class TestReportGeneration(unittest.TestCase):
         ]
         test_cases = parse_testcases_from_filepattern(os.path.join(self.reports_sample_data_path, 'XUNIT_*.xml'))
         apps = enrich_apps_with_metrics_info(
-            built_apps_size_info_response, import_apps_from_txt(os.path.join(self.reports_sample_data_path, 'apps'))
+            built_apps_size_info_response,
+            json_list_files_to_apps([os.path.join(self.reports_sample_data_path, 'apps')]),
         )
         self.target_test_report_generator = TargetTestReportGenerator(
             project_id=123,

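`json_list_files_to_apps` is the `idf_build_apps` replacement for the removed local `import_apps_from_txt` helper, taking a list of files instead of a single path. A usage sketch (the path is a placeholder; the attribute access is assumed from the `App` model):

```python
from idf_build_apps import json_list_files_to_apps

# Each listed file holds JSON-serialized apps; the helper deserializes
# all entries across all files into App objects.
apps = json_list_files_to_apps(['path/to/apps'])  # placeholder path
for app in apps:
    print(app.app_dir, app.build_dir)
```
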
@@ -4,7 +4,6 @@ tools/bt/bt_hci_to_btsnoop.py
 tools/catch/**/*
 tools/check_term.py
 tools/ci/*exclude*.txt
-tools/ci/artifacts_handler.py
 tools/ci/astyle-rules.yml
 tools/ci/check_*.py
 tools/ci/check_*.sh
@@ -23,7 +22,6 @@ tools/ci/fix_empty_prototypes.sh
 tools/ci/generate_rules.py
 tools/ci/get-full-sources.sh
 tools/ci/get_all_test_results.py
-tools/ci/get_known_failure_cases_file.py
 tools/ci/get_supported_examples.sh
 tools/ci/gitlab_yaml_linter.py
 tools/ci/idf_build_apps_dump_soc_caps.py

@@ -1,22 +0,0 @@
-# SPDX-FileCopyrightText: 2023 Espressif Systems (Shanghai) CO LTD
-# SPDX-License-Identifier: Apache-2.0
-
-import os
-
-import urllib3
-from minio import Minio
-
-from artifacts_handler import get_minio_client
-
-
-def getenv(env_var: str) -> str:
-    try:
-        return os.environ[env_var]
-    except KeyError as e:
-        raise Exception(f'Environment variable {env_var} not set') from e
-
-
-if __name__ == '__main__':
-    client = get_minio_client()
-    file_name = getenv('KNOWN_FAILURE_CASES_FILE_NAME')
-    client.fget_object('ignore-test-result-files', file_name, file_name)

@@ -1,22 +1,20 @@
 # SPDX-FileCopyrightText: 2024-2025 Espressif Systems (Shanghai) CO LTD
 # SPDX-License-Identifier: Apache-2.0
 import os
+import subprocess
 import sys
 import typing as t
 
 from dynamic_pipelines.constants import BINARY_SIZE_METRIC_NAME
 from idf_build_apps import App
 from idf_build_apps import CMakeApp
-from idf_build_apps import json_to_app
+from idf_build_apps.utils import rmdir
 
-from .uploader import get_app_uploader
-
 if t.TYPE_CHECKING:
-    from .uploader import AppUploader
+    pass
 
 
 class IdfCMakeApp(CMakeApp):
-    uploader: t.ClassVar[t.Optional['AppUploader']] = get_app_uploader()
     build_system: t.Literal['idf_cmake'] = 'idf_cmake'
 
     def _initialize_hook(self, **kwargs: t.Any) -> None:
@@ -28,8 +26,24 @@ class IdfCMakeApp(CMakeApp):
     def _post_build(self) -> None:
         super()._post_build()
 
-        if self.uploader:
-            self.uploader.upload_app(self.build_path)
+        # only upload in CI
+        if os.getenv('CI_JOB_ID'):
+            subprocess.run(
+                [
+                    'idf-ci',
+                    'gitlab',
+                    'upload-artifacts',
+                    self.app_dir,
+                ],
+                stdout=sys.stdout,
+                stderr=sys.stderr,
+            )
+            rmdir(
+                self.build_path,
+                exclude_file_patterns=[
+                    'build_log.txt',
+                ],
+            )
 
 
 class Metrics:
@@ -75,26 +89,6 @@ class AppWithMetricsInfo(IdfCMakeApp):
         arbitrary_types_allowed = True
 
 
-def dump_apps_to_txt(apps: t.List[App], output_filepath: str) -> None:
-    with open(output_filepath, 'w') as fw:
-        for app in apps:
-            fw.write(app.model_dump_json() + '\n')
-
-
-def import_apps_from_txt(input_filepath: str) -> t.List[App]:
-    apps: t.List[App] = []
-    with open(input_filepath) as fr:
-        for line in fr:
-            if line := line.strip():
-                try:
-                    apps.append(json_to_app(line, extra_classes=[IdfCMakeApp]))
-                except Exception:  # noqa
-                    print('Failed to deserialize app from line: %s' % line)
-                    sys.exit(1)
-
-    return apps
-
-
 def enrich_apps_with_metrics_info(
     app_metrics_info_map: t.Dict[str, t.Dict[str, t.Any]], apps: t.List[App]
 ) -> t.List[AppWithMetricsInfo]:

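With `AppUploader` removed, `_post_build` now shells out to the CLI and prunes the build directory afterwards. Run standalone, the equivalent would look like this sketch (the guard and arguments mirror the hook above; the directories are placeholders):

```python
import os
import subprocess
import sys

from idf_build_apps.utils import rmdir

if os.getenv('CI_JOB_ID'):  # only upload in CI, as the hook does
    subprocess.run(
        ['idf-ci', 'gitlab', 'upload-artifacts', 'examples/get-started/hello_world'],  # placeholder app dir
        stdout=sys.stdout,
        stderr=sys.stderr,
    )
    # Keep only the build log, as the hook does after uploading.
    rmdir('examples/get-started/hello_world/build', exclude_file_patterns=['build_log.txt'])
```
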
@@ -1,170 +0,0 @@
-# SPDX-FileCopyrightText: 2024-2025 Espressif Systems (Shanghai) CO LTD
-# SPDX-License-Identifier: Apache-2.0
-import abc
-import glob
-import os
-import typing as t
-from datetime import timedelta
-from zipfile import ZIP_DEFLATED
-from zipfile import ZipFile
-
-import minio
-from artifacts_handler import ArtifactType
-from artifacts_handler import get_minio_client
-from artifacts_handler import getenv
-from idf_build_apps import App
-from idf_build_apps.utils import rmdir
-from idf_ci_utils import IDF_PATH
-
-
-class AppDownloader:
-    ALL_ARTIFACT_TYPES = [ArtifactType.MAP_AND_ELF_FILES, ArtifactType.BUILD_DIR_WITHOUT_MAP_AND_ELF_FILES]
-
-    @abc.abstractmethod
-    def _download_app(self, app_build_path: str, artifact_type: ArtifactType) -> None:
-        pass
-
-    def download_app(self, app_build_path: str, artifact_type: t.Optional[ArtifactType] = None) -> None:
-        """
-        Download the app
-        :param app_build_path: the path to the build directory
-        :param artifact_type: if not specify, download all types of artifacts
-        :return: None
-        """
-        if not artifact_type:
-            for _artifact_type in self.ALL_ARTIFACT_TYPES:
-                self._download_app(app_build_path, _artifact_type)
-        else:
-            self._download_app(app_build_path, artifact_type)
-
-
-class AppUploader(AppDownloader):
-    TYPE_PATTERNS_DICT = {
-        ArtifactType.MAP_AND_ELF_FILES: [
-            'bootloader/*.map',
-            'bootloader/*.elf',
-            'esp_tee/*.map',
-            'esp_tee/*.elf',
-            '*.map',
-            '*.elf',
-            'gdbinit/*',
-        ],
-        ArtifactType.BUILD_DIR_WITHOUT_MAP_AND_ELF_FILES: [
-            '*.bin',
-            'bootloader/*.bin',
-            'esp_tee/*.bin',
-            'partition_table/*.bin',
-            'flasher_args.json',
-            'flash_project_args',
-            'config/sdkconfig.json',
-            'sdkconfig',
-            'project_description.json',
-        ],
-        ArtifactType.LOGS: [
-            'build_log.txt',
-        ],
-    }
-
-    def __init__(self, pipeline_id: t.Union[str, int, None] = None) -> None:
-        self.pipeline_id = str(pipeline_id or '1')
-
-        self._client = get_minio_client()
-
-    def get_app_object_name(self, app_path: str, zip_name: str, artifact_type: ArtifactType) -> str:
-        return f'{self.pipeline_id}/{artifact_type.value}/{app_path}/{zip_name}'
-
-    def _upload_app(self, app_build_path: str, artifact_type: ArtifactType) -> bool:
-        app_path, build_dir = os.path.split(app_build_path)
-        zip_filename = f'{build_dir}.zip'
-
-        has_file = False
-        with ZipFile(
-            zip_filename,
-            'w',
-            compression=ZIP_DEFLATED,
-            # 1 is the fastest compression level
-            # the size differs not much between 1 and 9
-            compresslevel=1,
-        ) as zw:
-            for pattern in self.TYPE_PATTERNS_DICT[artifact_type]:
-                for file in glob.glob(os.path.join(app_build_path, pattern), recursive=True):
-                    zw.write(file)
-                    has_file = True
-
-        uploaded = False
-        try:
-            if has_file:
-                obj_name = self.get_app_object_name(app_path, zip_filename, artifact_type)
-                self._client.fput_object(getenv('IDF_S3_BUCKET'), obj_name, zip_filename)
-                uploaded = True
-        finally:
-            os.remove(zip_filename)
-        return uploaded
-
-    def upload_app(self, app_build_path: str, artifact_type: t.Optional[ArtifactType] = None) -> None:
-        uploaded = False
-        if not artifact_type:
-            upload_types: t.Iterable[ArtifactType] = self.TYPE_PATTERNS_DICT.keys()
-        else:
-            upload_types = [artifact_type]
-
-        # Upload of size.json files is handled by GitLab CI via "artifacts_handler.py" script.
-        print(f'Uploading {app_build_path} {[k.value for k in upload_types]} to minio server')
-        for upload_type in upload_types:
-            uploaded |= self._upload_app(app_build_path, upload_type)
-
-        if uploaded:
-            rmdir(app_build_path, exclude_file_patterns=['build_log.txt', 'size.json'])
-
-    def _download_app(self, app_build_path: str, artifact_type: ArtifactType) -> None:
-        app_path, build_dir = os.path.split(app_build_path)
-        zip_filename = f'{build_dir}.zip'
-
-        # path are relative to IDF_PATH
-        current_dir = os.getcwd()
-        os.chdir(IDF_PATH)
-        try:
-            obj_name = self.get_app_object_name(app_path, zip_filename, artifact_type)
-            print(f'Downloading {obj_name}')
-            try:
-                try:
-                    self._client.stat_object(getenv('IDF_S3_BUCKET'), obj_name)
-                except minio.error.S3Error as e:
-                    raise RuntimeError(
-                        f'No such file on minio server: {obj_name}. '
-                        f'Probably the build failed or the artifacts got expired. '
-                        f'Full error message: {str(e)}'
-                    )
-                else:
-                    self._client.fget_object(getenv('IDF_S3_BUCKET'), obj_name, zip_filename)
-                    print(f'Downloaded to {zip_filename}')
-            except minio.error.S3Error as e:
-                raise RuntimeError("Shouldn't happen, please report this bug in the CI channel" + str(e))
-
-            with ZipFile(zip_filename, 'r') as zr:
-                zr.extractall()
-
-            os.remove(zip_filename)
-        finally:
-            os.chdir(current_dir)
-
-    def get_app_presigned_url(self, app: App, artifact_type: ArtifactType) -> str:
-        obj_name = self.get_app_object_name(app.app_dir, f'{app.build_dir}.zip', artifact_type)
-        try:
-            self._client.stat_object(
-                getenv('IDF_S3_BUCKET'),
-                obj_name,
-            )
-        except minio.error.S3Error:
-            return ''
-        else:
-            return self._client.get_presigned_url(  # type: ignore
-                'GET', getenv('IDF_S3_BUCKET'), obj_name, expires=timedelta(days=4)
-            )
-
-
-def get_app_uploader() -> t.Optional['AppUploader']:
-    if parent_pipeline_id := os.getenv('PARENT_PIPELINE_ID'):
-        return AppUploader(parent_pipeline_id)
-
-    return None

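For reference, the removed `AppUploader` addressed each archive with the fixed key layout from `get_app_object_name` above; reconstructed here with hypothetical values:

```python
# <pipeline_id>/<artifact_type>/<app_path>/<zip_name>, per get_app_object_name()
pipeline_id = '123456'  # hypothetical pipeline id
artifact_type_value = 'map_and_elf_files'  # assumed ArtifactType.MAP_AND_ELF_FILES.value
app_path = 'examples/get-started/hello_world'  # hypothetical app dir
zip_name = 'build_esp32_default.zip'  # '<build_dir>.zip', hypothetical build dir
print(f'{pipeline_id}/{artifact_type_value}/{app_path}/{zip_name}')
# 123456/map_and_elf_files/examples/get-started/hello_world/build_esp32_default.zip
```
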
@@ -6,7 +6,7 @@
 # https://docs.espressif.com/projects/esp-idf/en/latest/api-guides/tools/idf-tools.html
 
 # ci
-idf-ci==0.1.20
+idf-ci==0.1.35
 
 coverage
 jsonschema

@@ -3,7 +3,6 @@
 import os
 
 import pytest
-from artifacts_handler import ArtifactType
 from idf_ci_utils import IDF_PATH
 from pytest_embedded import Dut
 from pytest_embedded_idf.utils import idf_parametrize
@@ -21,7 +20,8 @@ def test_app_mmu_page_size_32k_and_bootloader_mmu_page_size_64k(dut: Dut, app_do
     path_to_mmu_page_size_64k_build = os.path.join(dut.app.app_path, f'build_{dut.target}_{app_config}')
     if app_downloader:
         app_downloader.download_app(
-            os.path.relpath(path_to_mmu_page_size_64k_build, IDF_PATH), ArtifactType.BUILD_DIR_WITHOUT_MAP_AND_ELF_FILES
+            os.path.relpath(path_to_mmu_page_size_64k_build, IDF_PATH),
+            'flash',
         )
 
     dut.serial.bootloader_flash(path_to_mmu_page_size_64k_build)
@@ -43,7 +43,8 @@ def test_app_mmu_page_size_64k_and_bootloader_mmu_page_size_32k(dut: Dut, app_do
     path_to_mmu_page_size_32k_build = os.path.join(dut.app.app_path, f'build_{dut.target}_{app_config}')
     if app_downloader:
         app_downloader.download_app(
-            os.path.relpath(path_to_mmu_page_size_32k_build, IDF_PATH), ArtifactType.BUILD_DIR_WITHOUT_MAP_AND_ELF_FILES
+            os.path.relpath(path_to_mmu_page_size_32k_build, IDF_PATH),
+            'flash',
         )
 
     dut.serial.bootloader_flash(path_to_mmu_page_size_32k_build)

@@ -4,7 +4,6 @@ import os
 import re
 
 import pytest
-from artifacts_handler import ArtifactType
 from idf_ci_utils import IDF_PATH
 from pytest_embedded import Dut
 from pytest_embedded_idf.utils import idf_parametrize
@@ -24,7 +23,8 @@ def test_multicore_app_and_unicore_bootloader(dut: Dut, app_downloader, config)
     path_to_unicore_build = os.path.join(dut.app.app_path, f'build_{dut.target}_{app_config}')
     if app_downloader:
         app_downloader.download_app(
-            os.path.relpath(path_to_unicore_build, IDF_PATH), ArtifactType.BUILD_DIR_WITHOUT_MAP_AND_ELF_FILES
+            os.path.relpath(path_to_unicore_build, IDF_PATH),
+            'flash',
         )
 
     dut.serial.bootloader_flash(path_to_unicore_build)
@@ -50,7 +50,8 @@ def test_unicore_app_and_multicore_bootloader(dut: Dut, app_downloader, config)
     path_to_multicore_build = os.path.join(dut.app.app_path, f'build_{dut.target}_{app_config}')
     if app_downloader:
         app_downloader.download_app(
-            os.path.relpath(path_to_multicore_build, IDF_PATH), ArtifactType.BUILD_DIR_WITHOUT_MAP_AND_ELF_FILES
+            os.path.relpath(path_to_multicore_build, IDF_PATH),
+            'flash',
        )
 
     dut.serial.bootloader_flash(path_to_multicore_build)