Mirror of https://github.com/espressif/esp-idf.git (synced 2025-12-28 13:53:09 +00:00)

Merge branch 'ci/enable-junit-report-in-mr' into 'master'

ci: enable junit report in MR

See merge request espressif/esp-idf!42925
@@ -64,3 +64,21 @@ deploy_update_SHA_in_esp-dockerfiles:
   environment:
     name: deploy_update_SHA_in_esp-dockerfiles_production
     deployment_tier: production
+
+upload_junit_report:
+  extends:
+    - .deploy_job_template
+  tags: [ fast_run, shiny ]
+  needs:
+    - pipeline_variables
+    - job: build_child_pipeline
+      artifacts: false
+  script:
+    - run_cmd idf-ci gitlab download-artifacts --type junit
+  rules:
+    - when: always
+  artifacts:
+    reports:
+      junit: XUNIT_RESULT_*.xml
+    expire_in: 1 week
+    when: always
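For context: `artifacts:reports:junit` is the GitLab mechanism that surfaces the downloaded `XUNIT_RESULT_*.xml` files in the merge request's test widget. Below is a minimal sketch of tallying such files locally, assuming the standard `<testsuite>` count attributes used by the sample report removed later in this commit (`tally_junit` is a hypothetical helper, not part of the MR):

```python
import glob
import xml.etree.ElementTree as ET


def tally_junit(pattern: str = 'XUNIT_RESULT_*.xml') -> dict:
    """Sum the testsuite-level counters across all matching junit files."""
    totals = {'tests': 0, 'failures': 0, 'errors': 0, 'skipped': 0}
    for path in glob.glob(pattern):
        # iter('testsuite') covers both <testsuites> wrappers and bare <testsuite> roots
        for suite in ET.parse(path).getroot().iter('testsuite'):
            for key in totals:
                totals[key] += int(suite.get(key, 0))
    return totals


if __name__ == '__main__':
    print(tally_junit())
```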
@@ -7,7 +7,9 @@ generate_failed_jobs_report:
     - .post_deploy_template
   tags: [build, shiny]
   when: always
-  dependencies: [] # Do not download artifacts from the previous stages
+  dependencies: # Do not download artifacts from the previous stages
+  needs:
+    - pipeline_variables
   artifacts:
     expire_in: 2 week
     when: always
@@ -48,7 +48,6 @@ pre_yaml_jinja = """
 include:
   - .gitlab/ci/common.yml
   - tools/ci/dynamic_pipelines/templates/.dynamic_jobs.yml
-  - tools/ci/dynamic_pipelines/templates/generate_target_test_report.yml
 """

 [gitlab.artifacts.s3.debug]
@@ -89,13 +88,13 @@ patterns = [
 [gitlab.artifacts.s3.junit]
 bucket = "idf-artifacts"
 patterns = [
-    'XUNIT_RESULT_*.xml',
+    '**/XUNIT_RESULT_*.xml',
 ]

 [gitlab.artifacts.s3.env]
 bucket = "idf-artifacts"
 patterns = [
-    'pipeline.env',
+    '**/pipeline.env',
 ]

 [gitlab.artifacts.s3.longterm]
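The pattern change above widens the artifact match from top-level files to any directory depth, assuming the patterns are expanded glob-style where `**` recurses into subdirectories (an illustrative sketch, not the uploader's actual code):

```python
from pathlib import Path

root = Path('.')
top_level = sorted(root.glob('XUNIT_RESULT_*.xml'))    # old pattern: top level only
anywhere = sorted(root.glob('**/XUNIT_RESULT_*.xml'))  # new pattern: top level and all subdirs
print(len(top_level), len(anywhere))
```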
@@ -1,10 +1,7 @@
 # SPDX-FileCopyrightText: 2024-2025 Espressif Systems (Shanghai) CO LTD
 # SPDX-License-Identifier: Apache-2.0
 import abc
-import copy
-import fnmatch
 import html
-import os
 import re
 import typing as t
 from textwrap import dedent
@@ -29,13 +26,10 @@ from .constants import SIZE_DIFFERENCE_BYTES_THRESHOLD
 from .constants import TOP_N_APPS_BY_SIZE_DIFF
 from .models import GitlabJob
 from .models import TestCase
-from .utils import fetch_failed_testcases_failure_ratio
 from .utils import format_permalink
 from .utils import get_artifacts_url
 from .utils import get_repository_file_url
 from .utils import is_url
-from .utils import known_failure_issue_jira_fast_link
-from .utils import load_known_failure_cases


 class ReportGenerator:
@@ -78,7 +72,7 @@ class ReportGenerator:
         return ''

     @staticmethod
-    def write_report_to_file(report_str: str, job_id: int, output_filepath: str) -> t.Optional[str]:
+    def write_report_to_file(report_str: str, job_id: int, output_filepath: str) -> str | None:
         """
         Writes the report to a file and constructs a modified URL based on environment settings.

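The return-type change here (and in the hunks below) is a pure notation swap: PEP 604's `str | None` is equivalent to `t.Optional[str]`, but the `X | Y` syntax needs Python 3.10+ at runtime (or `from __future__ import annotations` on older versions). A quick check, assuming Python 3.10+:

```python
import typing as t

# Both spellings describe the same type; on 3.10+ they compare equal.
assert (str | None) == t.Optional[str]
```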
@@ -106,9 +100,9 @@ class ReportGenerator:
         :return: Content of the file as string
         """
         try:
-            with open(filepath, 'r', encoding='utf-8') as f:
+            with open(filepath, encoding='utf-8') as f:
                 return f.read()
-        except (IOError, FileNotFoundError) as e:
+        except (OSError, FileNotFoundError) as e:
             print(f'Warning: Could not read file {filepath}: {e}')
             return ''

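Background for the two one-line changes above: `'r'` is already `open()`'s default mode, and `IOError` has been an alias of `OSError` since Python 3.3, so `FileNotFoundError` (an `OSError` subclass) was caught under either spelling. The behavior is unchanged:

```python
assert IOError is OSError
assert issubclass(FileNotFoundError, OSError)
```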
@@ -140,8 +134,8 @@ class ReportGenerator:
         items: list,
         headers: list,
         row_attrs: list,
-        value_functions: t.Optional[list] = None,
-    ) -> t.List:
+        value_functions: list | None = None,
+    ) -> list:
         """
         Appends a formatted section to a report based on the provided items. This section includes
         a header and a table constructed from the items list with specified headers and attributes.
@@ -173,7 +167,7 @@ class ReportGenerator:

     @staticmethod
     def generate_additional_info_section(
-        title: str, count: int, report_url: t.Optional[str] = None, add_permalink: bool = True
+        title: str, count: int, report_url: str | None = None, add_permalink: bool = True
     ) -> str:
         """
         Generate a section for the additional info string.
@@ -194,10 +188,10 @@ class ReportGenerator:

     def _create_table_for_items(
         self,
-        items: t.Union[t.List[TestCase], t.List[GitlabJob]],
-        headers: t.List[str],
-        row_attrs: t.List[str],
-        value_functions: t.Optional[t.List[t.Tuple[str, t.Callable[[t.Union[TestCase, GitlabJob]], str]]]] = None,
+        items: list[TestCase] | list[GitlabJob],
+        headers: list[str],
+        row_attrs: list[str],
+        value_functions: list[tuple[str, t.Callable[[TestCase | GitlabJob], str]]] | None = None,
     ) -> str:
         """
         Create a PrettyTable and convert it to an HTML string for the provided test cases.
@@ -237,8 +231,8 @@ class ReportGenerator:

     @staticmethod
     def _filter_items(
-        items: t.Union[t.List[TestCase], t.List[GitlabJob]], condition: t.Callable[[t.Union[TestCase, GitlabJob]], bool]
-    ) -> t.List[TestCase]:
+        items: list[TestCase] | list[GitlabJob], condition: t.Callable[[TestCase | GitlabJob], bool]
+    ) -> list[TestCase]:
         """
         Filter items based on a given condition.

@@ -250,11 +244,11 @@ class ReportGenerator:

     @staticmethod
     def _sort_items(
-        items: t.List[t.Union[TestCase, GitlabJob, AppWithMetricsInfo]],
-        key: t.Union[str, t.Callable[[t.Union[TestCase, GitlabJob, AppWithMetricsInfo]], t.Any]],
+        items: list[TestCase | GitlabJob | AppWithMetricsInfo],
+        key: str | t.Callable[[TestCase | GitlabJob | AppWithMetricsInfo], t.Any],
         order: str = 'asc',
-        sort_function: t.Optional[t.Callable[[t.Any], t.Any]] = None,
-    ) -> t.List[t.Union[TestCase, GitlabJob, AppWithMetricsInfo]]:
+        sort_function: t.Callable[[t.Any], t.Any] | None = None,
+    ) -> list[TestCase | GitlabJob | AppWithMetricsInfo]:
         """
         Sort items based on a given key, order, and optional custom sorting function.

@@ -350,7 +344,7 @@ class BuildReportGenerator(ReportGenerator):
         local_commit_id: str,
         *,
         title: str = 'Build Report',
-        apps: t.List[AppWithMetricsInfo],
+        apps: list[AppWithMetricsInfo],
     ) -> None:
         super().__init__(project_id, mr_iid, pipeline_id, job_id, commit_id, local_commit_id, title=title)
         self.apps = apps
@@ -367,7 +361,7 @@ class BuildReportGenerator(ReportGenerator):
         self.skipped_apps_report_file = 'skipped_apps.html'

     @staticmethod
-    def custom_sort(item: AppWithMetricsInfo) -> t.Tuple[int, t.Any]:
+    def custom_sort(item: AppWithMetricsInfo) -> tuple[int, t.Any]:
         """
         Custom sort function to:
         1. Push items with zero binary sizes to the end.
@@ -424,7 +418,7 @@ class BuildReportGenerator(ReportGenerator):
     @staticmethod
     def split_new_and_existing_apps(
         apps: t.Iterable[AppWithMetricsInfo],
-    ) -> t.Tuple[t.List[AppWithMetricsInfo], t.List[AppWithMetricsInfo]]:
+    ) -> tuple[list[AppWithMetricsInfo], list[AppWithMetricsInfo]]:
         """
         Splits apps into new apps and existing apps.

@@ -435,7 +429,7 @@ class BuildReportGenerator(ReportGenerator):
         existing_apps = [app for app in apps if not app.is_new_app]
         return new_apps, existing_apps

-    def filter_apps_by_criteria(self, build_status: str, preserve: bool) -> t.List[AppWithMetricsInfo]:
+    def filter_apps_by_criteria(self, build_status: str, preserve: bool) -> list[AppWithMetricsInfo]:
         """
         Filters apps based on build status and preserve criteria.

@@ -445,7 +439,7 @@ class BuildReportGenerator(ReportGenerator):
         """
         return [app for app in self.apps if app.build_status == build_status and app.preserve == preserve]

-    def get_built_apps_report_parts(self) -> t.List[str]:
+    def get_built_apps_report_parts(self) -> list[str]:
         """
         Generates report parts for new and existing apps.

@@ -617,7 +611,7 @@ class BuildReportGenerator(ReportGenerator):

         return sections

-    def get_failed_apps_report_parts(self) -> t.List[str]:
+    def get_failed_apps_report_parts(self) -> list[str]:
         failed_apps = [app for app in self.apps if app.build_status == BuildStatus.FAILED]
         if not failed_apps:
             return []
@@ -645,7 +639,7 @@ class BuildReportGenerator(ReportGenerator):
         )
         return failed_apps_table_section

-    def get_skipped_apps_report_parts(self) -> t.List[str]:
+    def get_skipped_apps_report_parts(self) -> list[str]:
         skipped_apps = [app for app in self.apps if app.build_status == BuildStatus.SKIPPED]
         if not skipped_apps:
             return []
@@ -681,248 +675,6 @@ class BuildReportGenerator(ReportGenerator):
         )


-class TargetTestReportGenerator(ReportGenerator):
-    def __init__(
-        self,
-        project_id: int,
-        mr_iid: int,
-        pipeline_id: int,
-        job_id: int,
-        commit_id: str,
-        local_commit_id: str,
-        *,
-        title: str = 'Target Test Report',
-        test_cases: t.List[TestCase],
-    ) -> None:
-        super().__init__(project_id, mr_iid, pipeline_id, job_id, commit_id, local_commit_id, title=title)
-
-        self.test_cases = test_cases
-        self._known_failure_cases_set = None
-        self.report_titles_map = {
-            'failed_yours': 'Testcases failed ONLY on your branch (known failures are excluded)',
-            'failed_others': 'Testcases failed on your branch as well as on others (known failures are excluded)',
-            'failed_known': 'Known Failure Cases',
-            'skipped': 'Skipped Test Cases',
-            'succeeded': 'Succeeded Test Cases',
-        }
-        self.skipped_test_cases_report_file = 'skipped_cases.html'
-        self.succeeded_cases_report_file = 'succeeded_cases.html'
-        self.failed_cases_report_file = 'failed_cases.html'
-
-    @property
-    def known_failure_cases_set(self) -> t.Optional[t.Set[str]]:
-        if self._known_failure_cases_set is None:
-            self._known_failure_cases_set = load_known_failure_cases()
-
-        return self._known_failure_cases_set
-
-    def get_known_failure_cases(self) -> t.List[TestCase]:
-        """
-        Retrieve the known failure test cases.
-        :return: A list of known failure test cases.
-        """
-        if self.known_failure_cases_set is None:
-            return []
-        matched_cases = [
-            testcase
-            for testcase in self.test_cases
-            if any(fnmatch.fnmatch(testcase.name, pattern) for pattern in self.known_failure_cases_set)
-            and testcase.is_failure
-        ]
-        return matched_cases
-
-    @staticmethod
-    def filter_test_cases(
-        cur_branch_failures: t.List[TestCase],
-        other_branch_failures: t.List[TestCase],
-    ) -> t.Tuple[t.List[TestCase], t.List[TestCase]]:
-        """
-        Filter the test cases into current branch failures and other branch failures.
-
-        :param cur_branch_failures: List of failed test cases on the current branch.
-        :param other_branch_failures: List of failed test cases on other branches.
-        :return: A tuple containing two lists:
-            - failed_test_cases_cur_branch_only: Test cases that have failed only on the current branch.
-            - failed_test_cases_other_branch_exclude_cur_branch: Test cases that have failed on other branches
-              excluding the current branch.
-        """
-        cur_branch_unique_failures = []
-        other_branch_failure_map = {tc.name: tc for tc in other_branch_failures}
-
-        for cur_tc in cur_branch_failures:
-            if cur_tc.latest_failed_count > 0 and (
-                cur_tc.name not in other_branch_failure_map
-                or other_branch_failure_map[cur_tc.name].latest_failed_count == 0
-            ):
-                cur_branch_unique_failures.append(cur_tc)
-        uniq_fail_names = {cur_tc.name for cur_tc in cur_branch_unique_failures}
-        other_branch_exclusive_failures = [tc for tc in other_branch_failures if tc.name not in uniq_fail_names]
-
-        return cur_branch_unique_failures, other_branch_exclusive_failures
-
-    def get_failed_cases_report_parts(self) -> t.List[str]:
-        """
-        Generate the report parts for failed test cases and update the additional info section.
-        :return: A list of strings representing the table sections for the failed test cases.
-        """
-        known_failures = self.get_known_failure_cases()
-        failed_test_cases = self._filter_items(
-            self.test_cases, lambda tc: tc.is_failure and tc.name not in {case.name for case in known_failures}
-        )
-        failed_test_cases_cur_branch = self._sort_items(
-            fetch_failed_testcases_failure_ratio(
-                copy.deepcopy(failed_test_cases),
-                branches_filter={'include_branches': [os.getenv('CI_MERGE_REQUEST_SOURCE_BRANCH_NAME', '')]},
-            ),
-            key='latest_failed_count',
-        )
-        failed_test_cases_other_branch = self._sort_items(
-            fetch_failed_testcases_failure_ratio(
-                copy.deepcopy(failed_test_cases),
-                branches_filter={'exclude_branches': [os.getenv('CI_MERGE_REQUEST_SOURCE_BRANCH_NAME', '')]},
-            ),
-            key='latest_failed_count',
-        )
-        failed_test_cases_cur_branch, failed_test_cases_other_branch = self.filter_test_cases(
-            failed_test_cases_cur_branch, failed_test_cases_other_branch
-        )
-        cur_branch_cases_table_section = self.create_table_section(
-            title=self.report_titles_map['failed_yours'],
-            items=failed_test_cases_cur_branch,
-            headers=[
-                'Test Case',
-                'Test App Path',
-                'Failure Reason',
-                'These test cases failed exclusively on your branch in the latest 40 runs',
-                'Dut Log URL',
-                'Create Known Failure Case Jira',
-                'Job URL',
-                'Grafana URL',
-            ],
-            row_attrs=['name', 'app_path', 'failure', 'dut_log_url', 'ci_job_url', 'ci_dashboard_url'],
-            value_functions=[
-                (
-                    'These test cases failed exclusively on your branch in the latest 40 runs',
-                    lambda item: f'{getattr(item, "latest_failed_count", "")} / '
-                    f'{getattr(item, "latest_total_count", "")}',
-                ),
-                ('Create Known Failure Case Jira', known_failure_issue_jira_fast_link),
-            ],
-        )
-        other_branch_cases_table_section = self.create_table_section(
-            title=self.report_titles_map['failed_others'],
-            items=failed_test_cases_other_branch,
-            headers=[
-                'Test Case',
-                'Test App Path',
-                'Failure Reason',
-                'Cases that failed in other branches as well (40 latest testcases)',
-                'Dut Log URL',
-                'Create Known Failure Case Jira',
-                'Job URL',
-                'Grafana URL',
-            ],
-            row_attrs=['name', 'app_path', 'failure', 'dut_log_url', 'ci_job_url', 'ci_dashboard_url'],
-            value_functions=[
-                (
-                    'Cases that failed in other branches as well (40 latest testcases)',
-                    lambda item: f'{getattr(item, "latest_failed_count", "")} '
-                    f'/ {getattr(item, "latest_total_count", "")}',
-                ),
-                ('Create Known Failure Case Jira', known_failure_issue_jira_fast_link),
-            ],
-        )
-        known_failures_cases_table_section = self.create_table_section(
-            title=self.report_titles_map['failed_known'],
-            items=known_failures,
-            headers=['Test Case', 'Test App Path', 'Failure Reason', 'Job URL', 'Grafana URL'],
-            row_attrs=['name', 'app_path', 'failure', 'ci_job_url', 'ci_dashboard_url'],
-        )
-        failed_cases_report_url = self.write_report_to_file(
-            self.generate_html_report(
-                ''.join(
-                    cur_branch_cases_table_section
-                    + other_branch_cases_table_section
-                    + known_failures_cases_table_section
-                )
-            ),
-            self.job_id,
-            self.failed_cases_report_file,
-        )
-        self.additional_info += self.generate_additional_info_section(
-            self.report_titles_map['failed_yours'], len(failed_test_cases_cur_branch), failed_cases_report_url
-        )
-        self.additional_info += self.generate_additional_info_section(
-            self.report_titles_map['failed_others'], len(failed_test_cases_other_branch), failed_cases_report_url
-        )
-        self.additional_info += self.generate_additional_info_section(
-            self.report_titles_map['failed_known'], len(known_failures), failed_cases_report_url
-        )
-        return cur_branch_cases_table_section + other_branch_cases_table_section + known_failures_cases_table_section
-
-    def get_skipped_cases_report_parts(self) -> t.List[str]:
-        """
-        Generate the report parts for skipped test cases and update the additional info section.
-        :return: A list of strings representing the table sections for the skipped test cases.
-        """
-        skipped_test_cases = self._filter_items(self.test_cases, lambda tc: tc.is_skipped)
-        skipped_cases_table_section = self.create_table_section(
-            title=self.report_titles_map['skipped'],
-            items=skipped_test_cases,
-            headers=['Test Case', 'Test App Path', 'Skipped Reason', 'Grafana URL'],
-            row_attrs=['name', 'app_path', 'skipped', 'ci_dashboard_url'],
-        )
-        skipped_cases_report_url = self.write_report_to_file(
-            self.generate_html_report(''.join(skipped_cases_table_section)),
-            self.job_id,
-            self.skipped_test_cases_report_file,
-        )
-        self.additional_info += self.generate_additional_info_section(
-            self.report_titles_map['skipped'], len(skipped_test_cases), skipped_cases_report_url
-        )
-        return skipped_cases_table_section
-
-    def get_succeeded_cases_report_parts(self) -> t.List[str]:
-        """
-        Generate the report parts for succeeded test cases and update the additional info section.
-        :return: A list of strings representing the table sections for the succeeded test cases.
-        """
-        succeeded_test_cases = self._filter_items(self.test_cases, lambda tc: tc.is_success)
-        succeeded_cases_table_section = self.create_table_section(
-            title=self.report_titles_map['succeeded'],
-            items=succeeded_test_cases,
-            headers=['Test Case', 'Test App Path', 'Job URL', 'Grafana URL'],
-            row_attrs=['name', 'app_path', 'ci_job_url', 'ci_dashboard_url'],
-        )
-        succeeded_cases_report_url = self.write_report_to_file(
-            self.generate_html_report(''.join(succeeded_cases_table_section)),
-            self.job_id,
-            self.succeeded_cases_report_file,
-        )
-        self.additional_info += self.generate_additional_info_section(
-            self.report_titles_map['succeeded'],
-            len(succeeded_test_cases),
-            succeeded_cases_report_url,
-            add_permalink=False,
-        )
-        self.additional_info += '\n'
-        return succeeded_cases_table_section
-
-    def _get_report_str(self) -> str:
-        """
-        Generate a complete HTML report string by processing test cases.
-        :return: Complete HTML report string.
-        """
-        self.additional_info = f'**Test Case Summary ({self.get_commit_summary}):**\n'
-        failed_cases_report_parts = self.get_failed_cases_report_parts()
-        skipped_cases_report_parts = self.get_skipped_cases_report_parts()
-        succeeded_cases_report_parts = self.get_succeeded_cases_report_parts()
-
-        return self.generate_html_report(
-            ''.join(failed_cases_report_parts + skipped_cases_report_parts + succeeded_cases_report_parts)
-        )
-
-
 class JobReportGenerator(ReportGenerator):
     def __init__(
         self,
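The removed `TargetTestReportGenerator` built the custom HTML target-test report; with junit results now uploaded as a GitLab report artifact, that widget presumably takes over this role. For reference, its "failed only on your branch" rule from `filter_test_cases()` reduces to the standalone sketch below (`TestCase` here is a stand-in dataclass, not the real model):

```python
from dataclasses import dataclass


@dataclass
class TestCase:
    name: str
    latest_failed_count: int


def split_failures(cur: list[TestCase], other: list[TestCase]) -> tuple[list[TestCase], list[TestCase]]:
    """A failure counts as 'yours' only if it never failed recently on other branches."""
    other_map = {tc.name: tc for tc in other}
    unique = [
        tc
        for tc in cur
        if tc.latest_failed_count > 0
        and (tc.name not in other_map or other_map[tc.name].latest_failed_count == 0)
    ]
    unique_names = {tc.name for tc in unique}
    return unique, [tc for tc in other if tc.name not in unique_names]


yours, others = split_failures(
    [TestCase('test_a', 3), TestCase('test_b', 1)],
    [TestCase('test_b', 2), TestCase('test_c', 4)],
)
print([tc.name for tc in yours])   # ['test_a'] -- test_b also fails elsewhere
print([tc.name for tc in others])  # ['test_b', 'test_c']
```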
@@ -934,7 +686,7 @@ class JobReportGenerator(ReportGenerator):
         local_commit_id: str,
         *,
         title: str = 'Job Report',
-        jobs: t.List[GitlabJob],
+        jobs: list[GitlabJob],
     ):
         super().__init__(project_id, mr_iid, pipeline_id, job_id, commit_id, local_commit_id, title=title)
         self.jobs = jobs
@@ -13,10 +13,8 @@ from idf_ci_local.app import enrich_apps_with_metrics_info

 from dynamic_pipelines.report import BuildReportGenerator
 from dynamic_pipelines.report import JobReportGenerator
-from dynamic_pipelines.report import TargetTestReportGenerator
 from dynamic_pipelines.utils import fetch_app_metrics
 from dynamic_pipelines.utils import fetch_failed_jobs
-from dynamic_pipelines.utils import parse_testcases_from_filepattern


 def main() -> None:
@@ -25,7 +23,6 @@ def main() -> None:

     report_actions: dict[str, t.Callable[[argparse.Namespace], None]] = {
         'build': generate_build_report,
-        'target_test': generate_target_test_report,
         'job': generate_jobs_report,
     }

@@ -39,7 +36,7 @@ def main() -> None:
 def setup_argument_parser() -> argparse.ArgumentParser:
     report_type_parser: argparse.ArgumentParser = argparse.ArgumentParser(add_help=False)
     report_type_parser.add_argument(
-        '--report-type', choices=['build', 'target_test', 'job'], required=True, help='Type of report to generate'
+        '--report-type', choices=['build', 'job'], required=True, help='Type of report to generate'
     )
     report_type_args: argparse.Namespace
     remaining_args: list[str]
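After this change, `target_test` is no longer a valid value for `--report-type`; argparse rejects it during parsing rather than it falling through to the dispatch dict. A tiny repro:

```python
import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--report-type', choices=['build', 'job'], required=True)

print(parser.parse_args(['--report-type', 'build']))  # Namespace(report_type='build')
# parser.parse_args(['--report-type', 'target_test'])  # would exit 2: invalid choice
```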
@@ -104,24 +101,6 @@ def generate_build_report(args: argparse.Namespace) -> None:
     report_generator.post_report()


-def generate_target_test_report(args: argparse.Namespace) -> None:
-    test_cases: list[t.Any] = parse_testcases_from_filepattern(args.junit_report_filepattern)
-    report_generator = TargetTestReportGenerator(
-        args.project_id,
-        args.mr_iid,
-        args.pipeline_id,
-        args.job_id,
-        args.commit_id,
-        args.local_commit_id,
-        test_cases=test_cases,
-    )
-    report_generator.post_report()
-
-    if GitlabEnvVars().IDF_CI_IS_DEBUG_PIPELINE:
-        print('Debug pipeline detected, exit non-zero to fail the pipeline in order to block merge')
-        exit(30)
-
-
 def generate_jobs_report(args: argparse.Namespace) -> None:
     jobs: list[t.Any] = fetch_failed_jobs(args.commit_id)

@@ -133,6 +112,10 @@ def generate_jobs_report(args: argparse.Namespace) -> None:
     )
     report_generator.post_report(print_retry_jobs_message=any(job.is_failed for job in jobs))

+    if GitlabEnvVars().IDF_CI_IS_DEBUG_PIPELINE:
+        print('Debug pipeline detected, exit non-zero to fail the pipeline in order to block merge')
+        exit(30)
+

 if __name__ == '__main__':
     main()
@@ -1,38 +0,0 @@
-all_test_finished:
-  stage: .post
-  tags: [fast_run, shiny]
-  image: $ESP_ENV_IMAGE
-  when: always
-  # this job is used to check if all target test jobs are finished
-  # because the `needs` make the later jobs run even if the previous stage are not finished
-  # and there's no `needs: stage` for now in gitlab
-  # https://gitlab.com/gitlab-org/gitlab/-/issues/220758
-  artifacts:
-    untracked: true
-    expire_in: 1 week
-    when: always
-  before_script: []
-  script:
-    - echo "all test jobs finished"
-
-generate_pytest_report:
-  stage: .post
-  tags: [build, shiny]
-  image: $ESP_ENV_IMAGE
-  when: always
-  needs:
-    - all_test_finished
-    - pipeline: $PARENT_PIPELINE_ID
-      job: pipeline_variables
-  artifacts:
-    paths:
-      - target_test_report.html
-      - failed_cases.html
-      - skipped_cases.html
-      - succeeded_cases.html
-    expire_in: 2 week
-    when: always
-  script:
-    - run_cmd idf-ci gitlab download-known-failure-cases-file ${KNOWN_FAILURE_CASES_FILE_NAME}
-    - python tools/ci/dynamic_pipelines/scripts/generate_report.py --report-type target_test
-    - python tools/ci/previous_stage_job_status.py --stage target_test
@@ -1,200 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<testsuites>
-  <testsuite errors="2" failures="0" hostname="FA002598" name="pytest" skipped="0" tests="2" time="22.981" timestamp="2024-05-17T17:51:26.669364">
-    <testcase classname="components.driver.test_apps.i2c_test_apps.pytest_i2c" app_path="components/driver/test_apps/i2c_test_apps" file="components/driver/test_apps/i2c_test_apps/pytest_i2c.py" line="21" name="('esp32h2', 'esp32h2').('defaults', 'defaults').test_i2c_multi_device" dut_log_url="https://url/esp/esp-idf/pytest-embedded/2024-07-01_10-53-05-207900/('esp32h2', 'esp32h2').('defaults', 'defaults').test_i2c_multi_device/dut.txt" time="11.910">
-      <error message="failed on setup with "EOFError"">conftest.py:74: in case_tester
-    yield CaseTester(dut, **kwargs)
-tools/ci/idf_unity_tester.py:202: in __init__
-    self._manager = Manager()
-/usr/lib/python3.9/multiprocessing/context.py:57: in Manager
-    m.start()
-/usr/lib/python3.9/multiprocessing/managers.py:557: in start
-    self._address = reader.recv()
-/usr/lib/python3.9/multiprocessing/connection.py:255: in recv
-    buf = self._recv_bytes()
-/usr/lib/python3.9/multiprocessing/connection.py:419: in _recv_bytes
-    buf = self._recv(4)
-/usr/lib/python3.9/multiprocessing/connection.py:388: in _recv
-    raise EOFError
-E   EOFError</error>
-    </testcase>
-    <testcase classname="components.driver.test_apps.i2s_test_apps.i2s_multi_dev.pytest_i2s_multi_dev" app_path="components/driver/test_apps/i2s_test_apps/i2s_multi_dev" file="components/driver/test_apps/i2s_test_apps/i2s_multi_dev/pytest_i2s_multi_dev.py" line="5" name="('esp32h2', 'esp32h2').('default', 'default').test_i2s_multi_dev" dut_log_url="https://url/esp/esp-idf/pytest-embedded/2024-07-01_10-53-05-207900/('esp32h2', 'esp32h2').('default', 'default').test_i2s_multi_dev/dut.txt" time="11.071">
-      <error message="failed on setup with "EOFError"">conftest.py:74: in case_tester
-    yield CaseTester(dut, **kwargs)
-tools/ci/idf_unity_tester.py:202: in __init__
-    self._manager = Manager()
-/usr/lib/python3.9/multiprocessing/context.py:57: in Manager
-    m.start()
-/usr/lib/python3.9/multiprocessing/managers.py:557: in start
-    self._address = reader.recv()
-/usr/lib/python3.9/multiprocessing/connection.py:255: in recv
-    buf = self._recv_bytes()
-/usr/lib/python3.9/multiprocessing/connection.py:419: in _recv_bytes
-    buf = self._recv(4)
-/usr/lib/python3.9/multiprocessing/connection.py:388: in _recv
-    raise EOFError
-E   EOFError</error>
-    </testcase>
-  </testsuite>
-  <testsuite errors="0" failures="1" hostname="GX64-C2-SH-1-ITS1N4" name="pytest" skipped="0" tests="3" time="101.163" timestamp="2024-05-17T17:52:04.061589">
-    <testcase classname="components.vfs.test_apps.pytest_vfs" app_path="components/vfs/test_apps" file="components/vfs/test_apps/pytest_vfs.py" line="7" name="esp32c2.default.test_vfs_default" dut_log_url="https://https://url/esp/esp-idf/pytest-embedded/2024-07-01_10-53-05-207900/esp32c2.default.test_vfs_default/dut.txt" time="30.044"/>
-    <testcase classname="components.vfs.test_apps.pytest_vfs" app_path="components/vfs/test_apps" file="components/vfs/test_apps/pytest_vfs.py" line="7" name="esp32c2.iram.test_vfs_default" dut_log_url="https://https://url/esp/esp-idf/pytest-embedded/2024-07-01_10-53-05-207900/esp32c2.iram.test_vfs_default/dut.txt" time="28.323"/>
-    <testcase classname="components.wpa_supplicant.test_apps.pytest_wpa_supplicant_ut" app_path="components/wpa_supplicant/test_apps" file="components/wpa_supplicant/test_apps/pytest_wpa_supplicant_ut.py" line="8" name="esp32c2.default.test_wpa_supplicant_ut" dut_log_url="https://url/esp/esp-idf/pytest-embedded/2024-07-01_10-53-05-207900/esp32c2.default.test_wpa_supplicant_ut/dut.txt" time="42.796">
-      <failure message="AssertionError: Unity test failed">/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pytest_embedded/plugin.py:1272: in pytest_runtest_call
-    self._raise_dut_failed_cases_if_exists(duts)  # type: ignore
-/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pytest_embedded/plugin.py:1207: in _raise_dut_failed_cases_if_exists
-    raise AssertionError('Unity test failed')
-E   AssertionError: Unity test failed</failure>
-    </testcase>
-  </testsuite>
-  <testsuite errors="0" failures="0" hostname="runner-zmdq2hnf-project-103-concurrent-3" name="pytest" skipped="1" tests="8" time="123.596" timestamp="2024-05-17T03:04:11.412971">
-    <testcase classname="test_common" app_path="tools" file="test_common.py" line="114" name="test_python_interpreter_unix" dut_log_url="https://url/esp/esp-idf/pytest-embedded/2024-07-01_10-53-05-207900/test_python_interpreter_unix/dut.txt" time="7.523"/>
-    <testcase classname="test_common" app_path="tools" file="test_common.py" line="133" name="test_python_interpreter_win" dut_log_url="https://url/esp/esp-idf/pytest-embedded/2024-07-01_10-53-05-207900/test_python_interpreter_win/dut.txt" time="0.000">
-      <skipped message="Linux does not support executing .exe files" type="pytest.skip">/builds/espressif/esp-idf/tools/test_build_system/test_common.py:134: Linux does not support executing .exe files</skipped>
-    </testcase>
-    <testcase classname="test_common" app_path="tools" file="test_common.py" line="147" name="test_invoke_confserver" dut_log_url="https://url/esp/esp-idf/pytest-embedded/2024-07-01_10-53-05-207900/test_invoke_confserver/dut.txt" time="10.179"/>
-    <testcase classname="test_common" app_path="tools" file="test_common.py" line="153" name="test_ccache_used_to_build" dut_log_url="https://url/esp/esp-idf/pytest-embedded/2024-07-01_10-53-05-207900/test_ccache_used_to_build/dut.txt" time="23.713"/>
-    <testcase classname="test_common" app_path="tools" file="test_common.py" line="171" name="test_toolchain_prefix_in_description_file" dut_log_url="https://url/esp/esp-idf/pytest-embedded/2024-07-01_10-53-05-207900/test_toolchain_prefix_in_description_file/dut.txt" time="8.390"/>
-    <testcase classname="test_common" app_path="tools" file="test_common.py" line="178" name="test_subcommands_with_options" dut_log_url="https://url/esp/esp-idf/pytest-embedded/2024-07-01_10-53-05-207900/test_subcommands_with_options/dut.txt" time="28.118"/>
-    <testcase classname="test_common" app_path="tools" file="test_common.py" line="194" name="test_fallback_to_build_system_target" dut_log_url="https://url/esp/esp-idf/pytest-embedded/2024-07-01_10-53-05-207900/test_fallback_to_build_system_target/dut.txt" time="11.687"/>
-    <testcase classname="test_common" app_path="tools" file="test_common.py" line="203" name="test_create_component_project" dut_log_url="https://url/esp/esp-idf/pytest-embedded/2024-07-01_10-53-05-207900/test_create_component_project/dut.txt" time="33.986"/>
-  </testsuite>
-  <testsuite errors="0" failures="4" hostname="FA002285" name="pytest" skipped="0" tests="4" time="231.048" timestamp="2024-05-17T17:50:02.291973">
-    <testcase classname="components.esp_timer.test_apps.pytest_esp_timer_ut" app_path="components/esp_timer/test_apps" file="components/esp_timer/test_apps/pytest_esp_timer_ut.py" line="20" name="esp32c3.release.test_esp_timer" dut_log_url="https://url/esp/esp-idf/pytest-embedded/2024-07-01_10-53-05-207900/esp32c3.release.test_esp_timer/dut.txt" time="39.686">
-      <failure message="pexpect.exceptions.TIMEOUT: Not found "Press ENTER to see the list of tests"
-Bytes in current buffer (color code eliminated): ce710,len:0x2afc entry 0x403cc710
-Please check the full log here: /builds/espressif/esp-idf/pytest_embedded/2024-05-17_17-50-04/esp32c3.release.test_esp_timer/dut.txt">/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pytest_embedded/dut.py:76: in wrapper
-    index = func(self, pattern, *args, **kwargs)
-/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pytest_embedded/dut.py:153: in expect_exact
-    return self.pexpect_proc.expect_exact(pattern, **kwargs)
-/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pexpect/spawnbase.py:432: in expect_exact
-    return exp.expect_loop(timeout)
-/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pexpect/expect.py:181: in expect_loop
-    return self.timeout(e)
-/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pexpect/expect.py:144: in timeout
-    raise exc
-E   pexpect.exceptions.TIMEOUT: <pytest_embedded.log.PexpectProcess object at 0x7f47b1ab46a0>
-E   searcher: searcher_string:
-E   0: b'Press ENTER to see the list of tests'
-E   <pytest_embedded.log.PexpectProcess object at 0x7f47b1ab46a0>
-E   searcher: searcher_string:
-E   0: b'Press ENTER to see the list of tests'
-
-The above exception was the direct cause of the following exception:
-components/esp_timer/test_apps/pytest_esp_timer_ut.py:24: in test_esp_timer
-    dut.run_all_single_board_cases(timeout=120)
-/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pytest_embedded_idf/unity_tester.py:435: in run_all_single_board_cases
-    for case in self.test_menu:
-/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pytest_embedded_idf/unity_tester.py:243: in test_menu
-    self._test_menu = self._parse_test_menu()
-/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pytest_embedded_idf/unity_tester.py:124: in _parse_test_menu
-    self.expect_exact(ready_line)
-/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pytest_embedded/dut.py:83: in wrapper
-    raise e.__class__(debug_str) from e
-E   pexpect.exceptions.TIMEOUT: Not found "Press ENTER to see the list of tests"
-E   Bytes in current buffer (color code eliminated): ce710,len:0x2afc entry 0x403cc710
-E   Please check the full log here: /builds/espressif/esp-idf/pytest_embedded/2024-05-17_17-50-04/esp32c3.release.test_esp_timer/dut.txt</failure>
-    </testcase>
-    <testcase classname="components.wear_levelling.test_apps.pytest_wear_levelling" app_path="components/wear_levelling/test_apps" file="components/wear_levelling/test_apps/pytest_wear_levelling.py" line="7" name="esp32c3.512safe.test_wear_levelling" dut_log_url="https://url/esp/esp-idf/pytest-embedded/2024-07-01_10-53-05-207900/esp32c3.512safe.test_wear_levelling/dut.txt" time="69.850">
-      <failure message="pexpect.exceptions.TIMEOUT: Not found "re.compile(b'^[-]+\\s*(\\d+) Tests (\\d+) Failures (\\d+) Ignored\\s*(?P<result>OK|FAIL)', re.MULTILINE)"
-Bytes in current buffer (color code eliminated): Serial port /dev/ttyUSB16 Connecting.... Connecting.... esptool.py v4.7.0 Found 1 serial ports Chip is ESP32-C3 (QFN32) (revision v0.3) Features: WiFi, BLE, Embedded Flash 4MB... (total 6673 bytes)
-Please check the full log here: /builds/espressif/esp-idf/pytest_embedded/2024-05-17_17-50-04/esp32c3.512safe.test_wear_levelling/dut.txt">/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pytest_embedded/dut.py:76: in wrapper
-    index = func(self, pattern, *args, **kwargs)
-/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pytest_embedded/dut.py:131: in expect
-    return self.pexpect_proc.expect(pattern, **kwargs)
-/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pexpect/spawnbase.py:354: in expect
-    return self.expect_list(compiled_pattern_list,
-/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pexpect/spawnbase.py:383: in expect_list
-    return exp.expect_loop(timeout)
-/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pexpect/expect.py:181: in expect_loop
-    return self.timeout(e)
-/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pexpect/expect.py:144: in timeout
-    raise exc
-E   pexpect.exceptions.TIMEOUT: <pytest_embedded.log.PexpectProcess object at 0x7f47af653b80>
-E   searcher: searcher_re:
-E   0: re.compile(b'^[-]+\\s*(\\d+) Tests (\\d+) Failures (\\d+) Ignored\\s*(?P<result>OK|FAIL)')
-E   <pytest_embedded.log.PexpectProcess object at 0x7f47af653b80>
-E   searcher: searcher_re:
-E   0: re.compile(b'^[-]+\\s*(\\d+) Tests (\\d+) Failures (\\d+) Ignored\\s*(?P<result>OK|FAIL)')
-
-The above exception was the direct cause of the following exception:
-components/wear_levelling/test_apps/pytest_wear_levelling.py:18: in test_wear_levelling
-    dut.expect_unity_test_output()
-/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pytest_embedded/dut.py:180: in expect_unity_test_output
-    self.expect(UNITY_SUMMARY_LINE_REGEX, timeout=timeout)
-/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pytest_embedded/dut.py:83: in wrapper
-    raise e.__class__(debug_str) from e
-E   pexpect.exceptions.TIMEOUT: Not found "re.compile(b'^[-]+\\s*(\\d+) Tests (\\d+) Failures (\\d+) Ignored\\s*(?P<result>OK|FAIL)', re.MULTILINE)"
-E   Bytes in current buffer (color code eliminated): Serial port /dev/ttyUSB16 Connecting.... Connecting.... esptool.py v4.7.0 Found 1 serial ports Chip is ESP32-C3 (QFN32) (revision v0.3) Features: WiFi, BLE, Embedded Flash 4MB... (total 6673 bytes)
-E   Please check the full log here: /builds/espressif/esp-idf/pytest_embedded/2024-05-17_17-50-04/esp32c3.512safe.test_wear_levelling/dut.txt</failure>
-    </testcase>
-    <testcase classname="components.wear_levelling.test_apps.pytest_wear_levelling" app_path="components/wear_levelling/test_apps" file="components/wear_levelling/test_apps/pytest_wear_levelling.py" line="7" name="esp32c3.release.test_wear_levelling" dut_log_url="https://url/esp/esp-idf/pytest-embedded/2024-07-01_10-53-05-207900/esp32c3.release.test_wear_levelling/dut.txt" time="70.304">
-      <failure message="pexpect.exceptions.TIMEOUT: Not found "re.compile(b'^[-]+\\s*(\\d+) Tests (\\d+) Failures (\\d+) Ignored\\s*(?P<result>OK|FAIL)', re.MULTILINE)"
-Bytes in current buffer (color code eliminated): Serial port /dev/ttyUSB16 Connecting.... Connecting.... esptool.py v4.7.0 Found 1 serial ports Chip is ESP32-C3 (QFN32) (revision v0.3) Features: WiFi, BLE, Embedded Flash 4MB... (total 24528 bytes)
-Please check the full log here: /builds/espressif/esp-idf/pytest_embedded/2024-05-17_17-50-04/esp32c3.release.test_wear_levelling/dut.txt">/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pytest_embedded/dut.py:76: in wrapper
-    index = func(self, pattern, *args, **kwargs)
-/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pytest_embedded/dut.py:131: in expect
-    return self.pexpect_proc.expect(pattern, **kwargs)
-/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pexpect/spawnbase.py:354: in expect
-    return self.expect_list(compiled_pattern_list,
-/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pexpect/spawnbase.py:383: in expect_list
-    return exp.expect_loop(timeout)
-/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pexpect/expect.py:181: in expect_loop
-    return self.timeout(e)
-/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pexpect/expect.py:144: in timeout
-    raise exc
-E   pexpect.exceptions.TIMEOUT: <pytest_embedded.log.PexpectProcess object at 0x7f47afc540d0>
-E   searcher: searcher_re:
-E   0: re.compile(b'^[-]+\\s*(\\d+) Tests (\\d+) Failures (\\d+) Ignored\\s*(?P<result>OK|FAIL)')
-E   <pytest_embedded.log.PexpectProcess object at 0x7f47afc540d0>
-E   searcher: searcher_re:
-E   0: re.compile(b'^[-]+\\s*(\\d+) Tests (\\d+) Failures (\\d+) Ignored\\s*(?P<result>OK|FAIL)')
-
-The above exception was the direct cause of the following exception:
-components/wear_levelling/test_apps/pytest_wear_levelling.py:18: in test_wear_levelling
-    dut.expect_unity_test_output()
-/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pytest_embedded/dut.py:180: in expect_unity_test_output
-    self.expect(UNITY_SUMMARY_LINE_REGEX, timeout=timeout)
-/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pytest_embedded/dut.py:83: in wrapper
-    raise e.__class__(debug_str) from e
-E   pexpect.exceptions.TIMEOUT: Not found "re.compile(b'^[-]+\\s*(\\d+) Tests (\\d+) Failures (\\d+) Ignored\\s*(?P<result>OK|FAIL)', re.MULTILINE)"
-E   Bytes in current buffer (color code eliminated): Serial port /dev/ttyUSB16 Connecting.... Connecting.... esptool.py v4.7.0 Found 1 serial ports Chip is ESP32-C3 (QFN32) (revision v0.3) Features: WiFi, BLE, Embedded Flash 4MB... (total 24528 bytes)
-E   Please check the full log here: /builds/espressif/esp-idf/pytest_embedded/2024-05-17_17-50-04/esp32c3.release.test_wear_levelling/dut.txt</failure>
-    </testcase>
-    <testcase classname="components.wpa_supplicant.test_apps.pytest_wpa_supplicant_ut" app_path="components/wpa_supplicant/test_apps" file="components/wpa_supplicant/test_apps/pytest_wpa_supplicant_ut.py" line="8" name="esp32c3.default.test_wpa_supplicant_ut" dut_log_url="https://url/esp/esp-idf/pytest-embedded/2024-07-01_10-53-05-207900/esp32c3.default.test_wpa_supplicant_ut/dut.txt" time="51.208">
-      <failure message="pexpect.exceptions.TIMEOUT: Not found "Press ENTER to see the list of tests"
-Bytes in current buffer (color code eliminated): 0 d4 000 00x0000 x0000x00 000000 0
-Please check the full log here: /builds/espressif/esp-idf/pytest_embedded/2024-05-17_17-50-04/esp32c3.default.test_wpa_supplicant_ut/dut.txt">/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pytest_embedded/dut.py:76: in wrapper
-    index = func(self, pattern, *args, **kwargs)
-/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pytest_embedded/dut.py:153: in expect_exact
-    return self.pexpect_proc.expect_exact(pattern, **kwargs)
-/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pexpect/spawnbase.py:432: in expect_exact
-    return exp.expect_loop(timeout)
-/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pexpect/expect.py:181: in expect_loop
-    return self.timeout(e)
-/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pexpect/expect.py:144: in timeout
-    raise exc
-E   pexpect.exceptions.TIMEOUT: <pytest_embedded.log.PexpectProcess object at 0x7f47afc54df0>
-E   searcher: searcher_string:
-E   0: b'Press ENTER to see the list of tests'
-E   <pytest_embedded.log.PexpectProcess object at 0x7f47afc54df0>
-E   searcher: searcher_string:
-E   0: b'Press ENTER to see the list of tests'
-
-The above exception was the direct cause of the following exception:
-components/wpa_supplicant/test_apps/pytest_wpa_supplicant_ut.py:17: in test_wpa_supplicant_ut
-    dut.run_all_single_board_cases()
-/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pytest_embedded_idf/unity_tester.py:435: in run_all_single_board_cases
-    for case in self.test_menu:
-/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pytest_embedded_idf/unity_tester.py:243: in test_menu
-    self._test_menu = self._parse_test_menu()
-/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pytest_embedded_idf/unity_tester.py:124: in _parse_test_menu
-    self.expect_exact(ready_line)
-/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pytest_embedded/dut.py:83: in wrapper
-    raise e.__class__(debug_str) from e
-E   pexpect.exceptions.TIMEOUT: Not found "Press ENTER to see the list of tests"
-E   Bytes in current buffer (color code eliminated): 0 d4 000 00x0000 x0000x00 000000 0
-E   Please check the full log here: /builds/espressif/esp-idf/pytest_embedded/2024-05-17_17-50-04/esp32c3.default.test_wpa_supplicant_ut/dut.txt</failure>
-    </testcase>
-  </testsuite>
-</testsuites>
@@ -1,9 +0,0 @@
-{"build_system": "idf_cmake", "app_dir": "tools/test_apps/system/panic", "target": "esp32s3", "sdkconfig_path": "/builds/espressif/esp-idf/tools/test_apps/system/panic/sdkconfig.ci.coredump_flash_capture_dram", "config_name": "coredump_flash_capture_dram", "sdkconfig_defaults_str": null, "dry_run": false, "verbose": false, "check_warnings": true, "preserve": false, "copy_sdkconfig": false, "index": null, "build_status": "build failed", "build_comment": "Compilation error", "cmake_vars": {}, "work_dir": "tools/test_apps/system/panic", "build_dir": "build_esp32s3_coredump_flash_capture_dram", "build_log_filename": "build_log.txt", "size_json_filename": "size.json"}
-{"build_system": "idf_cmake", "app_dir": "tools/test_apps/system/ram_loadable_app", "target": "esp32", "sdkconfig_path": "/builds/espressif/esp-idf/tools/test_apps/system/ram_loadable_app/sdkconfig.ci.defaults", "config_name": "defaults", "sdkconfig_defaults_str": null, "dry_run": false, "verbose": false, "check_warnings": true, "preserve": true, "copy_sdkconfig": false, "index": null, "build_status": "build success", "build_comment": null, "cmake_vars": {}, "work_dir": "tools/test_apps/system/ram_loadable_app", "build_dir": "build_esp32_defaults", "build_log_filename": "build_log.txt", "size_json_filename": "size.json"}
-{"build_system": "idf_cmake", "app_dir": "tools/test_apps/system/ram_loadable_app", "target": "esp32", "sdkconfig_path": "/builds/espressif/esp-idf/tools/test_apps/system/ram_loadable_app/sdkconfig.ci.pure_ram", "config_name": "pure_ram", "sdkconfig_defaults_str": null, "dry_run": false, "verbose": false, "check_warnings": true, "preserve": true, "copy_sdkconfig": false, "index": null, "build_status": "build success", "build_comment": null, "cmake_vars": {}, "work_dir": "tools/test_apps/system/ram_loadable_app", "build_dir": "build_esp32_pure_ram", "build_log_filename": "build_log.txt", "size_json_filename": "size.json"}
-{"build_system": "idf_cmake", "app_dir": "tools/test_apps/system/startup", "target": "esp32", "sdkconfig_path": "/builds/espressif/esp-idf/tools/test_apps/system/startup/sdkconfig.ci.flash_80m_qio", "config_name": "flash_80m_qio", "sdkconfig_defaults_str": null, "dry_run": false, "verbose": false, "check_warnings": true, "preserve": true, "copy_sdkconfig": false, "index": null, "build_status": "build success", "build_comment": null, "cmake_vars": {}, "work_dir": "tools/test_apps/system/startup", "build_dir": "build_esp32_flash_80m_qio", "build_log_filename": "build_log.txt", "size_json_filename": "size.json"}
-{"build_system": "idf_cmake", "app_dir": "tools/test_apps/system/startup", "target": "esp32s3", "sdkconfig_path": "/builds/espressif/esp-idf/tools/test_apps/system/startup/sdkconfig.ci.stack_check_verbose_log", "config_name": "stack_check_verbose_log", "sdkconfig_defaults_str": null, "dry_run": false, "verbose": false, "check_warnings": true, "preserve": true, "copy_sdkconfig": false, "index": null, "build_status": "build success", "build_comment": null, "cmake_vars": {}, "work_dir": "tools/test_apps/system/startup", "build_dir": "build_esp32s3_stack_check_verbose_log", "build_log_filename": "build_log.txt", "size_json_filename": "size.json"}
-{"build_system": "idf_cmake", "app_dir": "tools/test_apps/system/test_watchpoint", "target": "esp32", "sdkconfig_path": null, "config_name": "default", "sdkconfig_defaults_str": null, "dry_run": false, "verbose": false, "check_warnings": true, "preserve": false, "copy_sdkconfig": false, "index": null, "build_status": "skipped", "build_comment": "Skipped due to unmet dependencies", "cmake_vars": {}, "work_dir": "tools/test_apps/system/test_watchpoint", "build_dir": "build_esp32_default", "build_log_filename": "build_log.txt", "size_json_filename": "size.json"}
-{"build_system": "idf_cmake", "app_dir": "tools/test_apps/system/test_watchpoint", "target": "esp32c3", "sdkconfig_path": null, "config_name": "default", "sdkconfig_defaults_str": null, "dry_run": false, "verbose": false, "check_warnings": true, "preserve": false, "copy_sdkconfig": false, "index": null, "build_status": "skipped", "build_comment": "Skipped due to unmet dependencies", "cmake_vars": {}, "work_dir": "tools/test_apps/system/test_watchpoint", "build_dir": "build_esp32c3_default", "build_log_filename": "build_log.txt", "size_json_filename": "size.json"}
-{"build_system": "idf_cmake", "app_dir": "tools/test_apps/system/unicore_bootloader", "target": "esp32", "sdkconfig_path": "/builds/espressif/esp-idf/tools/test_apps/system/unicore_bootloader/sdkconfig.ci.multicore", "config_name": "multicore", "sdkconfig_defaults_str": null, "dry_run": false, "verbose": false, "check_warnings": true, "preserve": false, "copy_sdkconfig": false, "index": null, "build_status": "build failed", "build_comment": "Compilation error", "cmake_vars": {}, "work_dir": "tools/test_apps/system/unicore_bootloader", "build_dir": "build_esp32_multicore", "build_log_filename": "build_log.txt", "size_json_filename": "size.json"}
-{"build_system": "idf_cmake", "app_dir": "tools/test_apps/system/unicore_bootloader", "target": "esp32s3", "sdkconfig_path": "/builds/espressif/esp-idf/tools/test_apps/system/unicore_bootloader/sdkconfig.ci.unicore_psram", "config_name": "unicore_psram", "sdkconfig_defaults_str": null, "dry_run": false, "verbose": false, "check_warnings": true, "preserve": true, "copy_sdkconfig": false, "index": null, "build_status": "build success", "build_comment": null, "cmake_vars": {}, "work_dir": "tools/test_apps/system/unicore_bootloader", "build_dir": "build_esp32s3_unicore_psram", "build_log_filename": "build_log.txt", "size_json_filename": "size.json"}
@@ -1,146 +0,0 @@
-{
-    "tools/test_apps/system/panic_coredump_flash_capture_dram_esp32s3": {
-        "source_commit_id": "bacfa4aa59a37b70b800f1758106fa5f5af99f16",
-        "target_commit_id": "36d5d8c31c7d3332b43bd5fe4d40b515c6a71097",
-        "app_name": "panic",
-        "config_name": "coredump_flash_capture_dram",
-        "target": "esp32s3",
-        "metrics": {
-            "binary_size": {
-                "source_value": 156936,
-                "target_value": 162936,
-                "difference": 6000,
-                "difference_percentage": 3.82
-            }
-        },
-        "app_path": "tools/test_apps/system/panic_coredump"
-    },
-    "tools/test_apps/system/ram_loadable_app_defaults_esp32": {
-        "source_commit_id": "bacfa4aa59a37b70b800f1758106fa5f5af99f16",
-        "target_commit_id": "36d5d8c31c7d3332b43bd5fe4d40b515c6a71097",
-        "app_name": "ram_loadable_app",
-        "config_name": "defaults",
-        "target": "esp32",
-        "metrics": {
-            "binary_size": {
-                "source_value": 171448,
-                "target_value": 173000,
-                "difference": 1552,
-                "difference_percentage": 0.91
-            }
-        },
-        "app_path": "tools/test_apps/system/ram_loadable_app"
-    },
-    "tools/test_apps/system/ram_loadable_app_pure_ram_esp32": {
-        "source_commit_id": "bacfa4aa59a37b70b800f1758106fa5f5af99f16",
-        "target_commit_id": "36d5d8c31c7d3332b43bd5fe4d40b515c6a71097",
-        "app_name": "ram_loadable_app",
-        "config_name": "pure_ram",
-        "target": "esp32",
-        "metrics": {
-            "binary_size": {
-                "source_value": 156632,
-                "target_value": 158200,
-                "difference": 1568,
-                "difference_percentage": 1.0
-            }
-        },
-        "app_path": "tools/test_apps/system/ram_loadable_app"
-    },
-    "tools/test_apps/system/startup_flash_80m_qio_esp32": {
-        "source_commit_id": "bacfa4aa59a37b70b800f1758106fa5f5af99f16",
-        "target_commit_id": "36d5d8c31c7d3332b43bd5fe4d40b515c6a71097",
-        "app_name": "startup",
-        "config_name": "flash_80m_qio",
-        "target": "esp32",
-        "metrics": {
-            "binary_size": {
-                "source_value": 225692,
-                "target_value": 230000,
-                "difference": 4308,
-                "difference_percentage": 1.91
-            }
-        },
-        "app_path": "tools/test_apps/system/startup"
-    },
-    "tools/test_apps/system/startup_stack_check_verbose_log_esp32s3": {
-        "source_commit_id": "bacfa4aa59a37b70b800f1758106fa5f5af99f16",
-        "target_commit_id": "36d5d8c31c7d3332b43bd5fe4d40b515c6a71097",
-        "app_name": "startup",
-        "config_name": "stack_check_verbose_log",
-        "target": "esp32s3",
-        "metrics": {
-            "binary_size": {
-                "source_value": 156936,
-                "target_value": 160000,
-                "difference": 3064,
-                "difference_percentage": 1.95
-            }
-        },
-        "app_path": "tools/test_apps/system/startup"
-    },
-    "tools/test_apps/system/test_watchpoint_default_esp32": {
-        "source_commit_id": "bacfa4aa59a37b70b800f1758106fa5f5af99f16",
-        "target_commit_id": "36d5d8c31c7d3332b43bd5fe4d40b515c6a71097",
-        "app_name": "test_watchpoint",
-        "config_name": "default",
-        "target": "esp32",
-        "metrics": {
-            "binary_size": {
-                "source_value": 147896,
-                "target_value": 150000,
-                "difference": 2104,
-                "difference_percentage": 1.42
-            }
-        },
-        "app_path": "tools/test_apps/system/test_watchpoint"
-    },
-    "tools/test_apps/system/test_watchpoint_default_esp32c3": {
-        "source_commit_id": "bacfa4aa59a37b70b800f1758106fa5f5af99f16",
-        "target_commit_id": "36d5d8c31c7d3332b43bd5fe4d40b515c6a71097",
-        "app_name": "test_watchpoint",
-        "config_name": "default",
-        "target": "esp32c3",
-        "metrics": {
-            "binary_size": {
-                "source_value": 189456,
-                "target_value": 190456,
-                "difference": 1000,
-                "difference_percentage": 0.53
-            }
-        },
-        "app_path": "tools/test_apps/system/test_watchpoint"
-    },
-    "tools/test_apps/system/unicore_bootloader_multicore_esp32": {
-        "source_commit_id": "bacfa4aa59a37b70b800f1758106fa5f5af99f16",
-        "target_commit_id": "36d5d8c31c7d3332b43bd5fe4d40b515c6a71097",
-        "app_name": "unicore_bootloader",
-        "config_name": "multicore",
-        "target": "esp32",
-        "metrics": {
-            "binary_size": {
-                "source_value": 216784,
-                "target_value": 220000,
-                "difference": 3216,
-                "difference_percentage": 1.48
-            }
-        },
-        "app_path": "tools/test_apps/system/unicore_bootloader"
-    },
-    "tools/test_apps/system/unicore_bootloader_unicore_psram_esp32s3": {
-        "source_commit_id": "bacfa4aa59a37b70b800f1758106fa5f5af99f16",
-        "target_commit_id": "36d5d8c31c7d3332b43bd5fe4d40b515c6a71097",
-        "app_name": "unicore_bootloader",
-        "config_name": "unicore_psram",
-        "target": "esp32s3",
-        "metrics": {
-            "binary_size": {
-                "source_value": 189456,
-                "target_value": 191456,
-                "difference": 2000,
-                "difference_percentage": 1.06
-            }
-        },
-        "app_path": "tools/test_apps/system/unicore_bootloader"
-    }
-}
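A quick sanity check of the metric fields in the removed fixture above: `difference` is `target_value - source_value`, and `difference_percentage` is that delta relative to the source build. For the first entry:

```python
source_value, target_value = 156936, 162936
difference = target_value - source_value
print(difference)                                 # 6000
print(round(difference / source_value * 100, 2))  # 3.82
```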
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -1,7 +0,0 @@
-{
-    "jobs": [
-        {"failed_count": 2, "failure_ratio": 0.2, "total_count": 10, "name": "build_clang_test_apps_esp32h2"},
-        {"failed_count": 3, "failure_ratio": 0.3, "total_count": 10, "name": "build_template_app"},
-        {"failed_count": 4, "failure_ratio": 0.4, "total_count": 10, "name": "check_public_headers"}
-    ]
-}
@@ -1,212 +0,0 @@
{
    "jobs": [
        {
            "duration_sec": 42.158688,
            "failure_log": null,
            "failure_reason": null,
            "finished_at": "03:38:16, 24 May 2024",
            "id": 48838677,
            "name": "check_pre_commit",
            "pending_sec": 1.15148,
            "runner_name": "FA002598-build",
            "stage": "pre_check",
            "status": "success",
            "url": "https://test.com/-/jobs/48838677"
        },
        {
            "duration_sec": 35.438477,
            "failure_log": null,
            "failure_reason": null,
            "finished_at": "03:37:32, 24 May 2024",
            "id": 48838675,
            "name": "run-danger-mr-linter",
            "pending_sec": 0.371668,
            "runner_name": "BrnoVM0211",
            "stage": "pre_check",
            "status": "success",
            "url": "https://test.com/-/jobs/48838675"
        },
        {
            "duration_sec": 30.202475,
            "failure_log": null,
            "failure_reason": null,
            "finished_at": "03:37:28, 24 May 2024",
            "id": 48838682,
            "name": "check_esp_system",
            "pending_sec": 1.148756,
            "runner_name": "ruby6-cent9 [32]",
            "stage": "pre_check",
            "status": "success",
            "url": "https://test.com/-/jobs/48838682"
        },
        {
            "duration_sec": 33.75121,
            "failure_log": null,
            "failure_reason": null,
            "finished_at": "03:37:31, 24 May 2024",
            "id": 48838679,
            "name": "check_blobs",
            "pending_sec": 0.725292,
            "runner_name": "gem3-cent9 [32]",
            "stage": "pre_check",
            "status": "success",
            "url": "https://test.com/-/jobs/48838679"
        },
        {
            "duration_sec": 121.84324,
            "failure_log": null,
            "failure_reason": null,
            "finished_at": "03:41:35, 24 May 2024",
            "id": 48838687,
            "name": "code_quality_check",
            "pending_sec": 0.271973,
            "runner_name": "ruby15-cent9 [32]",
            "stage": "build",
            "status": "success",
            "url": "https://test.com/-/jobs/48838687"
        },
        {
            "duration_sec": 153.68849,
            "failure_log": null,
            "failure_reason": null,
            "finished_at": "03:39:32, 24 May 2024",
            "id": 48838686,
            "name": "fast_template_app",
            "pending_sec": 2.319577,
            "runner_name": "FA002598-build",
            "stage": "pre_check",
            "status": "success",
            "url": "https://test.com/-/jobs/48838686"
        },
        {
            "duration_sec": 25.572954,
            "failure_log": null,
            "failure_reason": null,
            "finished_at": "03:37:23, 24 May 2024",
            "id": 48838684,
            "name": "check_configure_ci_environment_parsing",
            "pending_sec": 1.184287,
            "runner_name": "gem3-cent9 [32]",
            "stage": "pre_check",
            "status": "success",
            "url": "https://test.com/-/jobs/48838684"
        },
        {
            "duration_sec": 120.95287,
            "failure_log": null,
            "failure_reason": null,
            "finished_at": "03:41:34, 24 May 2024",
            "id": 48838690,
            "name": "build_clang_test_apps_esp32s3",
            "pending_sec": 0.671956,
            "runner_name": "ruby7-cent9 [32]",
            "stage": "build",
            "status": "success",
            "url": "https://test.com/-/jobs/48838690"
        },
        {
            "duration_sec": 165.74513,
            "failure_log": null,
            "failure_reason": null,
            "finished_at": "03:42:19, 24 May 2024",
            "id": 48838692,
            "name": "build_clang_test_apps_esp32c2",
            "pending_sec": 0.82007,
            "runner_name": "PowerfulBuildRunner03 [16]",
            "stage": "build",
            "status": "success",
            "url": "https://test.com/-/jobs/48838692"
        },
        {
            "duration_sec": 95.72326,
            "failure_log": null,
            "failure_reason": null,
            "finished_at": "03:41:09, 24 May 2024",
            "id": 48838696,
            "name": "build_clang_test_apps_esp32p4",
            "pending_sec": 0.567116,
            "runner_name": "gem3-cent9 [32]",
            "stage": "build",
            "status": "success",
            "url": "https://test.com/-/jobs/48838696"
        },
        {
            "duration_sec": 122.19848,
            "failure_log": null,
            "failure_reason": null,
            "finished_at": "03:41:36, 24 May 2024",
            "id": 48838691,
            "name": "build_clang_test_apps_esp32c3",
            "pending_sec": 0.709112,
            "runner_name": "ruby6-cent9 [32]",
            "stage": "build",
            "status": "success",
            "url": "https://test.com/-/jobs/48838691"
        },
        {
            "duration_sec": 148.09895,
            "failure_log": null,
            "failure_reason": null,
            "finished_at": "03:42:02, 24 May 2024",
            "id": 48838694,
            "name": "build_clang_test_apps_esp32c5",
            "pending_sec": 0.779584,
            "runner_name": "PowerfulBuildRunner04 [15]",
            "stage": "build",
            "status": "success",
            "url": "https://test.com/-/jobs/48838694"
        },
        {
            "duration_sec": 20.275927,
            "failure_log": null,
            "failure_reason": null,
            "finished_at": "03:39:54, 24 May 2024",
            "id": 48838699,
            "name": "gen_integration_pipeline",
            "pending_sec": 0.868898,
            "runner_name": "FA002598-build",
            "stage": "assign_test",
            "status": "success",
            "url": "https://test.com/-/jobs/48838699"
        },
        {
            "duration_sec": 103.08849,
            "failure_log": "Some Failure LogSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure Reason",
            "failure_reason": "Some Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure Reason",
            "finished_at": "03:41:17, 24 May 2024",
            "id": 48838695,
            "name": "build_clang_test_apps_esp32h2",
            "pending_sec": 0.765111,
            "runner_name": "gem2-cent9 [32]",
            "stage": "build",
            "status": "failed",
            "url": "https://test.com/-/jobs/48838695"
        },
        {
            "duration_sec": 634.59467,
            "failure_log": "Some Failure LogSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure Reason",
            "failure_reason": "Some Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure Reason",
            "finished_at": "03:50:09, 24 May 2024",
            "id": 48838704,
            "name": "build_template_app",
            "pending_sec": 0.161796,
            "runner_name": "ruby6-cent9 [32]",
            "stage": "host_test",
            "status": "failed",
            "url": "https://test.com/-/jobs/48838704"
        },
        {
            "duration_sec": 1060.0835,
            "failure_log": "Some Failure Log",
            "failure_reason": "Some Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure Reason",
            "finished_at": "03:55:14, 24 May 2024",
            "id": 48838705,
            "name": "check_public_headers",
            "pending_sec": 0.449408,
            "runner_name": "ruby6-cent9 [32]",
            "stage": "host_test",
            "status": "failed",
            "url": "https://test.com/-/jobs/48838705"
        }
    ]
}
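The fixture above holds 16 jobs, 13 successful and 3 failed (build_clang_test_apps_esp32h2, build_template_app, check_public_headers), which is what the job-report tests below assert. A quick tally sketch (jobs_json is assumed to be the parsed file):

    from collections import Counter

    # count jobs in the fixture by status
    counts = Counter(job['status'] for job in jobs_json['jobs'])
    assert counts == {'success': 13, 'failed': 3}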
@@ -1,325 +0,0 @@
#!/usr/bin/env python
# SPDX-FileCopyrightText: 2024-2025 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
import json
import os.path
import sys
import unittest
from unittest.mock import MagicMock
from unittest.mock import patch

from idf_build_apps import json_list_files_to_apps

sys.path.insert(0, os.path.join(f'{os.environ.get("IDF_PATH")}', 'tools', 'ci', 'python_packages'))
sys.path.insert(0, os.path.join(f'{os.environ.get("IDF_PATH")}', 'tools', 'ci'))

from idf_build_apps.constants import BuildStatus  # noqa: E402
from idf_ci_local.app import enrich_apps_with_metrics_info  # noqa: E402

from dynamic_pipelines.models import GitlabJob  # noqa: E402
from dynamic_pipelines.report import BuildReportGenerator  # noqa: E402
from dynamic_pipelines.report import JobReportGenerator  # noqa: E402
from dynamic_pipelines.report import TargetTestReportGenerator  # noqa: E402
from dynamic_pipelines.utils import load_file  # noqa: E402
from dynamic_pipelines.utils import parse_testcases_from_filepattern  # noqa: E402


class TestReportGeneration(unittest.TestCase):
    def setUp(self) -> None:
        self.reports_sample_data_path = os.path.join(
            os.environ.get('IDF_PATH', ''),
            'tools',
            'ci',
            'dynamic_pipelines',
            'tests',
            'test_report_generator',
            'reports_sample_data',
        )
        self.setup_patches()
        self.load_test_and_job_reports()
        self.create_report_generators()

    def setup_patches(self) -> None:
        self.gitlab_patcher = patch('dynamic_pipelines.report.Gitlab')
        self.failure_rate_patcher = patch('dynamic_pipelines.report.fetch_failed_testcases_failure_ratio')
        self.env_patcher = patch.dict(
            'os.environ',
            {
                'CI_DASHBOARD_HOST': 'https://test_dashboard_host',
                'CI_PAGES_URL': 'https://artifacts_path',
                'CI_JOB_ID': '1',
                'JIRA_SERVER': 'https://jira.com',
            },
        )
        self.yaml_dump_patcher = patch('dynamic_pipelines.report.yaml.dump')

        self.MockGitlab = self.gitlab_patcher.start()
        self.test_cases_failure_rate = self.failure_rate_patcher.start()
        self.env_patcher.start()
        self.yaml_dump_patcher.start()

        self.mock_project = MagicMock()
        self.mock_mr = MagicMock()
        self.MockGitlab.return_value.project = self.mock_project
        self.mock_project.mergerequests.get.return_value = self.mock_mr

        self.addCleanup(self.gitlab_patcher.stop)
        self.addCleanup(self.failure_rate_patcher.stop)
        self.addCleanup(self.env_patcher.stop)
        self.addCleanup(self.yaml_dump_patcher.stop)
        self.addCleanup(self.cleanup_files)

    def cleanup_files(self) -> None:
        files_to_delete = [
            self.target_test_report_generator.skipped_test_cases_report_file,
            self.target_test_report_generator.succeeded_cases_report_file,
            self.target_test_report_generator.failed_cases_report_file,
            self.build_report_generator.failed_apps_report_file,
            self.build_report_generator.built_apps_report_file,
            self.build_report_generator.skipped_apps_report_file,
        ]
        for file_path in files_to_delete:
            if os.path.exists(file_path):
                os.remove(file_path)

    def load_test_and_job_reports(self) -> None:
        self.expected_target_test_report_html = load_file(
            os.path.join(self.reports_sample_data_path, 'expected_target_test_report.html')
        )
        self.expected_job_report_html = load_file(
            os.path.join(self.reports_sample_data_path, 'expected_job_report.html')
        )
        self.expected_build_report_html = load_file(
            os.path.join(self.reports_sample_data_path, 'expected_build_report.html')
        )

    def create_report_generators(self) -> None:
        jobs_response_raw = load_file(os.path.join(self.reports_sample_data_path, 'jobs_api_response.json'))
        failure_rate_jobs_response = load_file(
            os.path.join(self.reports_sample_data_path, 'failure_rate_jobs_response.json')
        )
        built_apps_size_info_response = json.loads(
            load_file(os.path.join(self.reports_sample_data_path, 'apps_size_info_api_response.json'))
        )
        failure_rates = {item['name']: item for item in json.loads(failure_rate_jobs_response).get('jobs', [])}
        jobs = [
            GitlabJob.from_json_data(job_json, failure_rates.get(job_json['name'], {}))
            for job_json in json.loads(jobs_response_raw)['jobs']
        ]
        test_cases = parse_testcases_from_filepattern(os.path.join(self.reports_sample_data_path, 'XUNIT_*.xml'))
        apps = enrich_apps_with_metrics_info(
            built_apps_size_info_response,
            json_list_files_to_apps([os.path.join(self.reports_sample_data_path, 'apps')]),
        )
        self.target_test_report_generator = TargetTestReportGenerator(
            project_id=123,
            mr_iid=1,
            pipeline_id=456,
            job_id=0,
            commit_id='cccc',
            title='Test Report',
            test_cases=test_cases,
        )
        self.job_report_generator = JobReportGenerator(
            project_id=123, mr_iid=1, pipeline_id=456, job_id=0, commit_id='cccc', title='Job Report', jobs=jobs
        )
        self.build_report_generator = BuildReportGenerator(
            project_id=123, mr_iid=1, pipeline_id=456, job_id=0, commit_id='cccc', title='Build Report', apps=apps
        )
        self.target_test_report_generator._known_failure_cases_set = {
            '*.test_wpa_supplicant_ut',
            'esp32c3.release.test_esp_timer',
            '*.512safe.test_wear_levelling',
        }
        test_cases_failed = [tc for tc in test_cases if tc.is_failure]
        for index, tc in enumerate(test_cases_failed):
            tc.latest_total_count = 40
            if index % 3 == 0:
                tc.latest_failed_count = 0
            else:
                tc.latest_failed_count = 3
        self.test_cases_failure_rate.return_value = test_cases_failed

    def test_known_failure_cases(self) -> None:
        known_failure_cases = self.target_test_report_generator.get_known_failure_cases()
        self.assertEqual(len(known_failure_cases), 4)

    def test_failed_cases_in_target_test_report(self) -> None:
        known_failures = self.target_test_report_generator.get_known_failure_cases()
        known_failure_case_names = {case.name for case in known_failures}
        failed_testcases = self.target_test_report_generator._filter_items(
            self.target_test_report_generator.test_cases,
            lambda tc: tc.is_failure and tc.name not in known_failure_case_names,
        )
        self.assertEqual(len(failed_testcases), 3)

    def test_skipped_cases_in_target_test_report(self) -> None:
        skipped_testcases = self.target_test_report_generator._filter_items(
            self.target_test_report_generator.test_cases, lambda tc: tc.is_skipped
        )
        self.assertEqual(len(skipped_testcases), 1)

    def test_successful_cases_in_target_test_report(self) -> None:
        succeeded_testcases = self.target_test_report_generator._filter_items(
            self.target_test_report_generator.test_cases, lambda tc: tc.is_success
        )
        self.assertEqual(len(succeeded_testcases), 9)

    def test_target_test_report_html_structure(self) -> None:
        report = self.target_test_report_generator._get_report_str()
        self.assertEqual(report, self.expected_target_test_report_html)

    def test_failed_jobs_in_job_report(self) -> None:
        failed_jobs = self.job_report_generator._filter_items(self.job_report_generator.jobs, lambda job: job.is_failed)
        self.assertEqual(len(failed_jobs), 3)

    def test_successful_jobs_in_job_report(self) -> None:
        succeeded_jobs = self.job_report_generator._filter_items(
            self.job_report_generator.jobs, lambda job: job.is_success
        )
        self.assertEqual(len(succeeded_jobs), 13)

    def test_job_report_html_structure(self) -> None:
        report = self.job_report_generator._get_report_str()
        self.assertEqual(report, self.expected_job_report_html)

    def test_generate_top_n_apps_by_size_table(self) -> None:
        apps_with_size_diff = [
            MagicMock(
                app_dir=f'app_dir_{i}',
                build_dir=f'build_dir_{i}',
                build_status=BuildStatus.SUCCESS,
                metrics={
                    'binary_size': MagicMock(
                        source=i * 10000,
                        target=i * 10000 + i * 1000,
                        difference=i * 1000,
                        difference_percentage=i * 0.5,
                    )
                },
            )
            for i in range(1, 6)
        ]
        build_report_generator = BuildReportGenerator(
            project_id=123,
            mr_iid=1,
            pipeline_id=456,
            job_id=0,
            commit_id='cccc',
            title='Build Report',
            apps=apps_with_size_diff,
        )

        top_apps_table = build_report_generator._generate_top_n_apps_by_size_table()

        self.assertIn('| App Dir | Build Dir | Size Diff (bytes) | Size Diff (%) |', top_apps_table)
        self.assertIn('| app_dir_5 | build_dir_5 | 5000 | 2.5% |', top_apps_table)
        self.assertIn('| app_dir_1 | build_dir_1 | 1000 | 0.5% |', top_apps_table)

    def test_get_built_apps_report_parts(self) -> None:
        apps = [
            MagicMock(
                app_dir='test_app_1',
                build_dir='build_dir_1',
                size_difference=1000,
                size_difference_percentage=1.0,
                build_status=BuildStatus.SUCCESS,
                preserve=True,
                metrics={'binary_size': MagicMock(difference=1000, difference_percentage=1.0)},
            ),
            MagicMock(
                app_dir='test_app_2',
                build_dir='build_dir_2',
                size_difference=2000,
                size_difference_percentage=2.0,
                build_status=BuildStatus.SUCCESS,
                preserve=False,
                metrics={'binary_size': MagicMock(difference=2000, difference_percentage=2.0)},
            ),
        ]

        build_report_generator = BuildReportGenerator(
            project_id=123, mr_iid=1, pipeline_id=456, job_id=0, commit_id='cccc', title='Build Report', apps=apps
        )

        built_apps_report_parts = build_report_generator.get_built_apps_report_parts()

        self.assertGreater(len(built_apps_report_parts), 0)
        self.assertIn('test_app_1', ''.join(built_apps_report_parts))
        self.assertIn('test_app_2', ''.join(built_apps_report_parts))

    def test_get_failed_apps_report_parts(self) -> None:
        failed_apps = [
            MagicMock(
                app_dir='failed_app_1',
                build_dir='build_dir_1',
                build_comment='Compilation error',
                build_status=BuildStatus.FAILED,
                metrics={'binary_size': MagicMock(difference=None, difference_percentage=None)},
            ),
            MagicMock(
                app_dir='failed_app_2',
                build_dir='build_dir_2',
                build_comment='Linker error',
                build_status=BuildStatus.FAILED,
                metrics={'binary_size': MagicMock(difference=None, difference_percentage=None)},
            ),
        ]

        build_report_generator = BuildReportGenerator(
            project_id=123,
            mr_iid=1,
            pipeline_id=456,
            job_id=0,
            commit_id='cccc',
            title='Build Report',
            apps=failed_apps,
        )

        failed_apps_report_parts = build_report_generator.get_failed_apps_report_parts()

        self.assertGreater(len(failed_apps_report_parts), 0)
        self.assertIn('failed_app_1', ''.join(failed_apps_report_parts))
        self.assertIn('failed_app_2', ''.join(failed_apps_report_parts))

    def test_get_skipped_apps_report_parts(self) -> None:
        skipped_apps = [
            MagicMock(
                app_dir='skipped_app_1',
                build_dir='build_dir_1',
                build_comment='Dependencies unmet',
                build_status=BuildStatus.SKIPPED,
                metrics={'binary_size': MagicMock(difference=None, difference_percentage=None)},
            ),
            MagicMock(
                app_dir='skipped_app_2',
                build_dir='build_dir_2',
                build_comment='Feature flag disabled',
                build_status=BuildStatus.SKIPPED,
                metrics={'binary_size': MagicMock(difference=None, difference_percentage=None)},
            ),
        ]

        build_report_generator = BuildReportGenerator(
            project_id=123,
            mr_iid=1,
            pipeline_id=456,
            job_id=0,
            commit_id='cccc',
            title='Build Report',
            apps=skipped_apps,
        )

        skipped_apps_report_parts = build_report_generator.get_skipped_apps_report_parts()

        self.assertGreater(len(skipped_apps_report_parts), 0)
        self.assertIn('skipped_app_1', ''.join(skipped_apps_report_parts))
        self.assertIn('skipped_app_2', ''.join(skipped_apps_report_parts))

    def test_build_report_html_structure(self) -> None:
        report = self.build_report_generator._get_report_str()
        self.assertEqual(report, self.expected_build_report_html)


if __name__ == '__main__':
    unittest.main()
@@ -1,11 +1,6 @@
# SPDX-FileCopyrightText: 2024-2025 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
import glob
import os
import re
import xml.etree.ElementTree as ET
from urllib.parse import quote
from urllib.parse import urlencode
from urllib.parse import urlparse

import requests
@@ -16,42 +11,6 @@ from .constants import CI_MERGE_REQUEST_SOURCE_BRANCH_SHA
from .constants import CI_PAGES_URL
from .constants import CI_PROJECT_URL
from .models import GitlabJob
from .models import TestCase


def parse_testcases_from_filepattern(junit_report_filepattern: str) -> list[TestCase]:
    """
    Parses test cases from XML files matching the provided file pattern.

    >>> test_cases = parse_testcases_from_filepattern('path/to/your/junit/reports/*.xml')

    :param junit_report_filepattern: The file pattern to match XML files containing JUnit test reports.
    :return: List[TestCase]: A list of TestCase objects parsed from the XML files.
    """

    test_cases = []
    for f in glob.glob(junit_report_filepattern):
        root = ET.parse(f).getroot()
        for tc in root.findall('.//testcase'):
            test_cases.append(TestCase.from_test_case_node(tc))
    return test_cases


def load_known_failure_cases() -> set[str] | None:
    known_failures_file = os.getenv('KNOWN_FAILURE_CASES_FILE_NAME', '')
    if not known_failures_file:
        return None
    try:
        with open(known_failures_file) as f:
            file_content = f.read()

        pattern = re.compile(r'^(.*?)\s+#\s+([A-Z]+)-\d+', re.MULTILINE)
        matches = pattern.findall(file_content)

        known_cases_list = [match[0].strip() for match in matches]
        return set(known_cases_list)
    except FileNotFoundError:
        return None

|
||||
@@ -109,33 +68,6 @@ def fetch_failed_jobs(commit_id: str) -> list[GitlabJob]:
|
||||
return combined_jobs
|
||||
|
||||
|
||||
def fetch_failed_testcases_failure_ratio(failed_testcases: list[TestCase], branches_filter: dict) -> list[TestCase]:
|
||||
"""
|
||||
Fetches info about failure rates of testcases using an API request to ci-dashboard-api.
|
||||
:param failed_testcases: The list of failed testcases models.
|
||||
:param branches_filter: The filter to filter testcases by branch names.
|
||||
:return: A list of testcases with enriched with failure rates data.
|
||||
"""
|
||||
req_json = {'testcase_names': list(set([testcase.name for testcase in failed_testcases])), **branches_filter}
|
||||
response = requests.post(
|
||||
f'{CI_DASHBOARD_API}/testcases/failure_ratio',
|
||||
headers={'CI-Job-Token': CI_JOB_TOKEN},
|
||||
json=req_json,
|
||||
)
|
||||
if response.status_code != 200:
|
||||
print(f'Failed to fetch testcases failure rate data: {response.status_code} with error: {response.text}')
|
||||
return []
|
||||
|
||||
failure_rate_data = response.json()
|
||||
failure_rates = {item['name']: item for item in failure_rate_data.get('testcases', [])}
|
||||
|
||||
for testcase in failed_testcases:
|
||||
testcase.latest_total_count = failure_rates.get(testcase.name, {}).get('total_count', 0)
|
||||
testcase.latest_failed_count = failure_rates.get(testcase.name, {}).get('failed_count', 0)
|
||||
|
||||
return failed_testcases
|
||||
|
||||
|
||||
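A minimal usage sketch for the function above; the branches_filter key is an assumption for illustration, since the ci-dashboard-api contract is not shown here:

    # failed is a list of TestCase models; the filter shape is assumed
    enriched = fetch_failed_testcases_failure_ratio(
        failed,
        branches_filter={'branch_name': 'master'},  # assumed key
    )
    for tc in enriched:
        print(tc.name, tc.latest_failed_count, tc.latest_total_count)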
def fetch_app_metrics(
    source_commit_sha: str,
    target_commit_sha: str,
@@ -217,34 +149,3 @@ def get_repository_file_url(file_path: str) -> str:
    :return: The modified URL pointing to the file's path in the repository.
    """
    return f'{CI_PROJECT_URL}/-/raw/{CI_MERGE_REQUEST_SOURCE_BRANCH_SHA}/{file_path}'


def known_failure_issue_jira_fast_link(_item: TestCase) -> str:
    """
    Generate a JIRA fast link for known issues with relevant test case details.
    """
    jira_url = os.getenv('JIRA_SERVER')
    jira_pid = os.getenv('JIRA_KNOWN_FAILURE_PID', '10514')
    jira_issuetype = os.getenv('JIRA_KNOWN_FAILURE_ISSUETYPE', '10004')
    jira_component = os.getenv('JIRA_KNOWN_FAILURE_COMPONENT', '11909')
    jira_assignee = os.getenv('JIRA_KNOWN_FAILURE_ASSIGNEE', 'zhangjianwen')
    jira_affected_versions = os.getenv('JIRA_KNOWN_FAILURE_VERSIONS', '17602')
    jira_priority = os.getenv('JIRA_KNOWN_FAILURE_PRIORITY', '3')

    base_url = f'{jira_url}/secure/CreateIssueDetails!init.jspa?'
    params = {
        'pid': jira_pid,
        'issuetype': jira_issuetype,
        'summary': f'[Test Case]{_item.name}',
        'description': (
            f'job_url: {quote(_item.ci_job_url, safe=":/")}\n\n'
            f'dut_log_url: {quote(_item.dut_log_url, safe=":/")}\n\n'
            f'ci_dashboard_url: {_item.ci_dashboard_url}\n\n'
        ),
        'components': jira_component,
        'priority': jira_priority,
        'assignee': jira_assignee,
        'versions': jira_affected_versions,
    }
    query_string = urlencode(params)
    return f'<a href="{base_url}{query_string}">Create</a>'
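For illustration, with the defaults above, JIRA_SERVER=https://jira.com, and a hypothetical test case named esp32.release.test_esp_timer, the generated anchor would point at roughly this URL (description elided, since it depends on the test case's URLs):

    https://jira.com/secure/CreateIssueDetails!init.jspa?pid=10514&issuetype=10004&summary=%5BTest+Case%5Desp32.release.test_esp_timer&description=...&components=11909&priority=3&assignee=zhangjianwen&versions=17602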