ci: add failed jobs report generator. Improve Target Test Report

Introduced changes:
- refactor the cli script used for report generation
- introduce failed jobs report generator
- cover job report generation with tests
- add job failure rate
- add test cases failure rate
- add current branch / other branches statistics for failed jobs / testcases
Aleksei Apaseev 2024-05-18 19:00:08 +08:00
parent aa27fbd231
commit 63bd3a18ad
18 changed files with 1117 additions and 322 deletions


@@ -30,4 +30,5 @@ include:
   - '.gitlab/ci/integration_test.yml'
   - '.gitlab/ci/host-test.yml'
   - '.gitlab/ci/deploy.yml'
+  - '.gitlab/ci/post_deploy.yml'
   - '.gitlab/ci/test-win.yml'


@@ -395,6 +395,6 @@ test_idf_pytest_plugin:
       junit: XUNIT_RESULT.xml
   script:
     - cd ${IDF_PATH}/tools/ci/dynamic_pipelines/tests/test_report_generator
-    - python -m unittest test_target_test_report_generator.py
+    - python -m unittest test_report_generator.py
     - cd ${IDF_PATH}/tools/ci/idf_pytest
     - pytest --junitxml=${CI_PROJECT_DIR}/XUNIT_RESULT.xml


@@ -0,0 +1,12 @@
generate_failed_jobs_report:
  stage: post_deploy
  tags: [build, shiny]
  image: $ESP_ENV_IMAGE
  when: always
  artifacts:
    expire_in: 1 week
    when: always
    paths:
      - job_report.html
  script:
    - python tools/ci/dynamic_pipelines/scripts/generate_report.py --report-type job


@@ -135,6 +135,8 @@ class TestCase:
     ci_job_url: t.Optional[str] = None
     ci_dashboard_url: t.Optional[str] = None
     dut_log_url: t.Optional[str] = None
+    latest_total_count: int = 0
+    latest_failed_count: int = 0

     @property
     def is_failure(self) -> bool:
@@ -179,3 +181,45 @@ class TestCase:
             kwargs['skipped'] = skipped_node.attrib['message']
         return cls(**kwargs)  # type: ignore


@dataclass
class GitlabJob:
    id: int
    name: str
    stage: str
    status: str
    url: str
    ci_dashboard_url: str
    failure_reason: t.Optional[str] = None
    failure_log: t.Optional[str] = None
    latest_total_count: int = 0
    latest_failed_count: int = 0

    @property
    def is_failed(self) -> bool:
        return self.status == 'failed'

    @property
    def is_success(self) -> bool:
        return self.status == 'success'

    @classmethod
    def from_json_data(cls, job_data: dict, failure_data: dict) -> t.Optional['GitlabJob']:
        grafana_base_url = urllib.parse.urljoin(os.getenv('CI_DASHBOARD_HOST', ''), '/d/LoUa-qLWz/job-list')
        encoded_params = urllib.parse.urlencode({'var-job_name': job_data['name']}, quote_via=urllib.parse.quote)
        kwargs = {
            'id': job_data['id'],
            'name': job_data['name'],
            'stage': job_data['stage'],
            'status': job_data['status'],
            'url': job_data['url'],
            'ci_dashboard_url': f'{grafana_base_url}?{encoded_params}',
            'failure_reason': job_data['failure_reason'],
            'failure_log': job_data['failure_log'],
            'latest_total_count': failure_data.get('total_count', 0),
            'latest_failed_count': failure_data.get('failed_count', 0),
        }
        return cls(**kwargs)  # type: ignore
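For orientation, a minimal sketch (not part of the commit) of how `GitlabJob.from_json_data` pairs one entry from the jobs API response with its failure-rate record; the field values mirror the sample fixtures further down:

```python
# Hedged sketch: assumes GitlabJob is importable and CI_DASHBOARD_HOST is set.
from dynamic_pipelines.models import GitlabJob

job_data = {
    'id': 48838695,
    'name': 'build_clang_test_apps_esp32h2',
    'stage': 'build',
    'status': 'failed',
    'url': 'https://test.com/-/jobs/48838695',
    'failure_reason': 'Some Failure Reason',
    'failure_log': 'Some Failure Log',
}
failure_data = {'failed_count': 2, 'total_count': 10}  # one /jobs/failure_ratio record

job = GitlabJob.from_json_data(job_data, failure_data)
assert job.is_failed and not job.is_success
assert (job.latest_failed_count, job.latest_total_count) == (2, 10)
# job.ci_dashboard_url ends with '/d/LoUa-qLWz/job-list?var-job_name=build_clang_test_apps_esp32h2'
```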


@@ -18,7 +18,9 @@ from prettytable import PrettyTable
 from .constants import COMMENT_START_MARKER
 from .constants import REPORT_TEMPLATE_FILEPATH
 from .constants import TEST_RELATED_APPS_DOWNLOAD_URLS_FILENAME
+from .models import GitlabJob
 from .models import TestCase
+from .utils import fetch_failed_testcases_failure_ratio
 from .utils import is_url
 from .utils import load_known_failure_cases
@@ -58,25 +60,150 @@ class ReportGenerator:
    def table_to_html_str(table: PrettyTable) -> str:
        return html.unescape(table.get_html_string())  # type: ignore

    def create_table_section(
        self,
        report_sections: list,
        title: str,
        items: list,
        headers: list,
        row_attrs: list,
        value_functions: t.Optional[list] = None,
    ) -> None:
        """
        Appends a formatted section to a report based on the provided items. The section consists of
        a header and a table built from the items list with the specified headers and attributes.

        :param report_sections: List where the HTML report sections are collected. This list is
            modified in place by appending new sections.
        :param title: Title for the report section, rendered as a header above the table.
        :param items: List of item objects to include in the table. Each item should have attributes
            that correspond to the row_attrs and value_functions specified.
        :param headers: List of strings that serve as the column headers in the generated table.
        :param row_attrs: List of attributes to read from each item for the table rows. These
            should be attributes or keys that exist on the items in the 'items' list.
        :param value_functions: Optional list of tuples, each pairing an additional header (a string)
            with a function that takes an item and returns a string. Used for generating dynamic
            columns based on item data.
        :return: None. The function modifies the 'report_sections' list by appending new HTML sections.
        """
        if not items:
            return

        report_sections.append(f'<h2>{title}</h2>')
        report_sections.append(
            self._create_table_for_items(
                items=items, headers=headers, row_attrs=row_attrs, value_functions=value_functions or []
            )
        )

    def _create_table_for_items(
        self,
        items: t.Union[t.List[TestCase], t.List[GitlabJob]],
        headers: t.List[str],
        row_attrs: t.List[str],
        value_functions: t.Optional[t.List[t.Tuple[str, t.Callable[[t.Union[TestCase, GitlabJob]], str]]]] = None,
    ) -> str:
        """
        Create a PrettyTable and convert it to an HTML string for the provided items.

        :param items: List of item objects to include in the table.
        :param headers: List of strings for the table headers.
        :param row_attrs: List of attributes to include in each row.
        :param value_functions: List of tuples containing additional header and corresponding value function.
        :return: HTML table string.
        """
        table = PrettyTable()
        table.field_names = headers

        # Create a mapping of header names to their corresponding index in the headers list
        header_index_map = {header: i for i, header in enumerate(headers)}

        for item in items:
            row = []
            for attr in row_attrs:
                value = str(getattr(item, attr, ''))
                if is_url(value):
                    link = f'<a href="{value}">link</a>'
                    row.append(link)
                else:
                    row.append(value)

            # Insert values computed by value functions at the correct column position based on their headers
            if value_functions:
                for header, func in value_functions:
                    index = header_index_map.get(header)
                    if index is not None:
                        computed_value = func(item)
                        row.insert(index, computed_value)

            table.add_row(row)

        return self.table_to_html_str(table)

    @staticmethod
    def _filter_items(
        items: t.Union[t.List[TestCase], t.List[GitlabJob]], condition: t.Callable[[t.Union[TestCase, GitlabJob]], bool]
    ) -> t.List[TestCase]:
        """
        Filter items based on a given condition.

        :param items: List of items to filter by the given condition.
        :param condition: A function that evaluates to True or False for each item.
        :return: List of filtered instances.
        """
        return [item for item in items if condition(item)]

    @staticmethod
    def _sort_items(
        items: t.List[t.Union[TestCase, GitlabJob]],
        key: t.Union[str, t.Callable[[t.Union[TestCase, GitlabJob]], t.Any]],
        order: str = 'asc',
    ) -> t.List[t.Union[TestCase, GitlabJob]]:
        """
        Sort items based on a given key and order.

        :param items: List of items to sort.
        :param key: A string representing the attribute name or a function to extract the sorting key.
        :param order: Order of sorting ('asc' for ascending, 'desc' for descending).
        :return: List of sorted instances.
        """
        key_func = None
        if isinstance(key, str):

            def key_func(item: t.Any) -> t.Any:
                return getattr(item, key)

        if key_func is not None:
            try:
                items = sorted(items, key=key_func, reverse=(order == 'desc'))
            except TypeError:
                print(f'Comparison for the key {key} is not supported')

        return items

    @abc.abstractmethod
    def _get_report_str(self) -> str:
        raise NotImplementedError

    def post_report(self, job_id: int, commit_id: str) -> None:
        # report in html format, otherwise will exceed the limit
-       with open(self.output_filepath, 'w') as fw:
-           fw.write(self._get_report_str())
-
-       # for example, {URL}/-/esp-idf/-/jobs/{id}/artifacts/list_job_84.txt
-       # CI_PAGES_URL is {URL}/esp-idf, which missed one `-`
-       url = os.getenv('CI_PAGES_URL', '').replace('esp-idf', '-/esp-idf')
-
        comment = f'#### {self.title}\n'
+       report_str = self._get_report_str()

        if self.additional_info:
            comment += f'{self.additional_info}\n'

-       comment += f"""
-Full {self.title} here: {url}/-/jobs/{job_id}/artifacts/{self.output_filepath} (with commit {commit_id})
-"""
+       if report_str:
+           with open(self.output_filepath, 'w') as fw:
+               fw.write(report_str)
+
+           # for example, {URL}/-/esp-idf/-/jobs/{id}/artifacts/list_job_84.txt
+           # CI_PAGES_URL is {URL}/esp-idf, which missed one `-`
+           url = os.getenv('CI_PAGES_URL', '').replace('esp-idf', '-/esp-idf')
+
+           comment += f"""
+Full {self.title} here: {url}/-/jobs/{job_id}/artifacts/{self.output_filepath} (with commit {commit_id[:8]})
+"""

        print(comment)
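As a usage sketch of the new helpers (hypothetical `generator` instance and `jobs` list, not code from the commit), filtering, sorting, and table building compose like this:

```python
# Hedged sketch: `generator` is any concrete ReportGenerator subclass instance
# and `jobs` is a list of GitlabJob objects (both hypothetical here).
report_sections: list = []
failed = generator._sort_items(
    generator._filter_items(jobs, lambda j: j.is_failed),
    key='latest_failed_count',
    order='desc',
)
generator.create_table_section(
    report_sections=report_sections,
    title='Failed Jobs',
    items=failed,
    headers=['Job Name', 'Status', 'Failure ratio', 'URL'],
    row_attrs=['name', 'status', 'url'],
    # The computed column is inserted at the index of its header ('Failure ratio' -> 2).
    value_functions=[('Failure ratio', lambda j: f'{j.latest_failed_count} / {j.latest_total_count}')],
)
html_str = ''.join(report_sections)  # one <h2> plus one <table> per non-empty section
```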
@@ -265,94 +392,172 @@ class TargetTestReportGenerator(ReportGenerator):
        ]
        return matched_cases

-   def _filter_test_cases(self, condition: t.Callable[[TestCase], bool]) -> t.List[TestCase]:
-       """
-       Filter test cases based on a given condition. In this scenario, we filter by status,
-       however it is possible to filter by other criteria.
-
-       :param condition: A function that evaluates to True or False for each test case.
-       :return: List of filtered TestCase instances.
-       """
-       return [tc for tc in self.test_cases if condition(tc)]
-
-   def _create_table_for_test_cases(
-       self, test_cases: t.List[TestCase], headers: t.List[str], row_attrs: t.List[str]
-   ) -> str:
-       """
-       Create a PrettyTable and convert it to an HTML string for the provided test cases.
-
-       :param test_cases: List of TestCase objects to include in the table.
-       :param headers: List of strings for the table headers.
-       :param row_attrs: List of attributes to include in each row.
-       :return: HTML table string.
-       """
-       table = PrettyTable()
-       table.field_names = headers
-       for tc in test_cases:
-           row = []
-           for attr in row_attrs:
-               value = getattr(tc, attr, '')
-               if is_url(value):
-                   link = f'<a href="{value}">link</a>'
-                   row.append(link)
-               else:
-                   row.append(value)
-           table.add_row(row)
-       return self.table_to_html_str(table)
-
    def _get_report_str(self) -> str:
        """
        Generate a complete HTML report string by processing test cases.

        :return: Complete HTML report string.
        """
-       table_str = ''
+       report_parts: list = []
        known_failures = self.get_known_failure_cases()
        known_failure_case_names = {case.name for case in known_failures}
-       failed_test_cases = self._filter_test_cases(
-           lambda tc: tc.is_failure and tc.name not in known_failure_case_names
+       failed_test_cases = self._filter_items(
+           self.test_cases, lambda tc: tc.is_failure and tc.name not in known_failure_case_names
        )
-       skipped_test_cases = self._filter_test_cases(lambda tc: tc.is_skipped)
-       successful_test_cases = self._filter_test_cases(lambda tc: tc.is_success)
+       failed_test_cases_with_ratio = self._sort_items(
+           fetch_failed_testcases_failure_ratio(failed_test_cases), key='latest_failed_count'
+       )
+       skipped_test_cases = self._filter_items(self.test_cases, lambda tc: tc.is_skipped)
+       successful_test_cases = self._filter_items(self.test_cases, lambda tc: tc.is_success)
+       current_branch_failures = self._sort_items(
+           self._filter_items(failed_test_cases_with_ratio, lambda tc: tc.latest_failed_count == 0),
+           key='latest_failed_count',
+       )
+       other_branch_failures = self._sort_items(
+           self._filter_items(
+               failed_test_cases_with_ratio, lambda tc: tc.name not in [t.name for t in current_branch_failures]
+           ),
+           key='latest_failed_count',
+       )

-       if failed_test_cases:
-           table_str += '<h2>Failed Test Cases (Excludes Known Failure Cases)</h2>'
-           table_str += self._create_table_for_test_cases(
-               test_cases=failed_test_cases,
-               headers=['Test Case', 'Test Script File Path', 'Failure Reason', 'Job URL', 'Grafana URL'],
-               row_attrs=['name', 'file', 'failure', 'ci_job_url', 'ci_dashboard_url'],
-           )
-
-       if known_failures:
-           table_str += '<h2>Known Failure Cases</h2>'
-           table_str += self._create_table_for_test_cases(
-               test_cases=known_failures,
-               headers=['Test Case', 'Test Script File Path', 'Failure Reason', 'Job URL', 'Grafana URL'],
-               row_attrs=['name', 'file', 'failure', 'ci_job_url', 'ci_dashboard_url'],
-           )
-
-       if skipped_test_cases:
-           table_str += '<h2>Skipped Test Cases</h2>'
-           table_str += self._create_table_for_test_cases(
-               test_cases=skipped_test_cases,
-               headers=['Test Case', 'Test Script File Path', 'Skipped Reason', 'Grafana URL'],
-               row_attrs=['name', 'file', 'skipped', 'ci_dashboard_url'],
-           )
-
-       if successful_test_cases:
-           table_str += '<h2>Succeeded Test Cases</h2>'
-           table_str += self._create_table_for_test_cases(
-               test_cases=successful_test_cases,
-               headers=['Test Case', 'Test Script File Path', 'Job URL', 'Grafana URL'],
-               row_attrs=['name', 'file', 'ci_job_url', 'ci_dashboard_url'],
-           )
+       self.create_table_section(
+           report_sections=report_parts,
+           title='Failed Test Cases on Your branch (Excludes Known Failure Cases)',
+           items=current_branch_failures,
+           headers=[
+               'Test Case',
+               'Test Script File Path',
+               'Failure Reason',
+               'Failures across all other branches (20 latest testcases)',
+               'Job URL',
+               'Grafana URL',
+           ],
+           row_attrs=['name', 'file', 'failure', 'ci_job_url', 'ci_dashboard_url'],
+           value_functions=[
+               (
+                   'Failures across all other branches (20 latest testcases)',
+                   lambda item: f"{getattr(item, 'latest_failed_count', '')} / {getattr(item, 'latest_total_count', '')}",
+               )
+           ],
+       )
+       self.create_table_section(
+           report_sections=report_parts,
+           title='Failed Test Cases on Other branches (Excludes Known Failure Cases)',
+           items=other_branch_failures,
+           headers=[
+               'Test Case',
+               'Test Script File Path',
+               'Failure Reason',
+               'Failures across all other branches (20 latest testcases)',
+               'Job URL',
+               'Grafana URL',
+           ],
+           row_attrs=['name', 'file', 'failure', 'ci_job_url', 'ci_dashboard_url'],
+           value_functions=[
+               (
+                   'Failures across all other branches (20 latest testcases)',
+                   lambda item: f"{getattr(item, 'latest_failed_count', '')} / {getattr(item, 'latest_total_count', '')}",
+               )
+           ],
+       )
+       self.create_table_section(
+           report_sections=report_parts,
+           title='Known Failure Cases',
+           items=known_failures,
+           headers=['Test Case', 'Test Script File Path', 'Failure Reason', 'Job URL', 'Grafana URL'],
+           row_attrs=['name', 'file', 'failure', 'ci_job_url', 'ci_dashboard_url'],
+       )
+       self.create_table_section(
+           report_sections=report_parts,
+           title='Skipped Test Cases',
+           items=skipped_test_cases,
+           headers=['Test Case', 'Test Script File Path', 'Skipped Reason', 'Grafana URL'],
+           row_attrs=['name', 'file', 'skipped', 'ci_dashboard_url'],
+       )
+       self.create_table_section(
+           report_sections=report_parts,
+           title='Succeeded Test Cases',
+           items=successful_test_cases,
+           headers=['Test Case', 'Test Script File Path', 'Job URL', 'Grafana URL'],
+           row_attrs=['name', 'file', 'ci_job_url', 'ci_dashboard_url'],
+       )

        self.additional_info = (
            '**Test Case Summary:**\n'
-           f'- **Failed Test Cases (Excludes Known Failure Cases):** {len(failed_test_cases)}\n'
+           f'- **Failed Test Cases on Your Branch (Excludes Known Failure Cases):** {len(current_branch_failures)}.\n'
+           f'- **Failed Test Cases on Other Branches (Excludes Known Failure Cases):** {len(other_branch_failures)}.\n'
            f'- **Known Failures:** {len(known_failures)}\n'
            f'- **Skipped Test Cases:** {len(skipped_test_cases)}\n'
            f'- **Succeeded Test Cases:** {len(successful_test_cases)}\n\n'
-           f'Please check report below for more information.\n\n'
+           'Please check report below for more information.\n\n'
        )

-       return self.generate_html_report(table_str)
+       return self.generate_html_report(''.join(report_parts))
class JobReportGenerator(ReportGenerator):
    def __init__(
        self,
        project_id: int,
        mr_iid: int,
        pipeline_id: int,
        *,
        title: str = 'Job Report',
        jobs: t.List[GitlabJob],
    ):
        super().__init__(project_id, mr_iid, pipeline_id, title=title)
        self.jobs = jobs

    def _get_report_str(self) -> str:
        """
        Generate a complete HTML report string by processing jobs.

        :return: Complete HTML report string.
        """
        report_str: str = ''
        report_parts: list = []

        if not self.jobs:
            print('No jobs found, skip generating job report')
            return 'No Job Found'

        relevant_failed_jobs = self._sort_items(
            self._filter_items(
                self.jobs, lambda job: job.is_failed and job.stage not in ['integration_test', 'target_test']
            ),
            key='latest_failed_count',
        )
        succeeded_jobs = self._filter_items(self.jobs, lambda job: job.is_success)

        self.additional_info = (
            '**Job Summary:**\n'
            f'- **Failed Jobs (Excludes "integration_test" and "target_test" jobs):** {len(relevant_failed_jobs)}\n'
            f'- **Succeeded Jobs:** {len(succeeded_jobs)}\n\n'
        )

        if relevant_failed_jobs:
            self.create_table_section(
                report_sections=report_parts,
                title='Failed Jobs (Excludes "integration_test" and "target_test" jobs)',
                items=relevant_failed_jobs,
                headers=[
                    'Job Name',
                    'Failure Reason',
                    'Failure Log',
                    'Failures across all other branches (10 latest jobs)',
                    'URL',
                    'CI Dashboard URL',
                ],
                row_attrs=['name', 'failure_reason', 'failure_log', 'url', 'ci_dashboard_url'],
                value_functions=[
                    (
                        'Failures across all other branches (10 latest jobs)',
                        lambda item: f"{getattr(item, 'latest_failed_count', '')} / {getattr(item, 'latest_total_count', '')}",
                    )
                ],
            )
            self.additional_info += 'Please check report below for more information.\n\n'
            report_str = self.generate_html_report(''.join(report_parts))

        return report_str
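Taken together, a hypothetical end-to-end run of the job report path (IDs and commit are placeholders, not values from the commit):

```python
# Hedged sketch: placeholder IDs; relies on ESPCI_TOKEN / CI_DASHBOARD_API being set.
from dynamic_pipelines.report import JobReportGenerator
from dynamic_pipelines.utils import fetch_failed_jobs

jobs = fetch_failed_jobs('63bd3a18ad')  # all jobs for the commit, failed ones enriched with ratios
generator = JobReportGenerator(project_id=123, mr_iid=1, pipeline_id=456, jobs=jobs)
# Writes job_report.html when there is report content, and prints the MR comment body.
generator.post_report(job_id=1, commit_id='63bd3a18ad')
```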


@@ -1,59 +0,0 @@
# SPDX-FileCopyrightText: 2024 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
import argparse
import glob
import os

import __init__  # noqa: F401 # inject the system path

from dynamic_pipelines.report import BuildReportGenerator
from idf_ci.app import import_apps_from_txt

if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description='Update Build Report in MR pipelines',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
    )
    parser.add_argument(
        '--project-id',
        type=int,
        default=os.getenv('CI_PROJECT_ID'),
        help='Project ID',
    )
    parser.add_argument(
        '--mr-iid',
        type=int,
        default=os.getenv('CI_MERGE_REQUEST_IID'),
        help='Merge Request IID',
    )
    parser.add_argument(
        '--pipeline-id',
        type=int,
        default=os.getenv('PARENT_PIPELINE_ID'),
        help='Pipeline ID',
    )
    parser.add_argument(
        '--job-id',
        type=int,
        default=os.getenv('CI_JOB_ID'),
        help='Job ID',
    )
    parser.add_argument(
        '--commit-id',
        default=os.getenv('CI_COMMIT_SHORT_SHA'),
        help='MR commit ID',
    )
    parser.add_argument(
        '--app-list-filepattern',
        default='list_job_*.txt',
        help='App list file pattern',
    )

    args = parser.parse_args()

    apps = []
    for f in glob.glob(args.app_list_filepattern):
        apps.extend(import_apps_from_txt(f))

    report_generator = BuildReportGenerator(args.project_id, args.mr_iid, args.pipeline_id, apps=apps)
    report_generator.post_report(args.job_id, args.commit_id)


@@ -0,0 +1,101 @@
# SPDX-FileCopyrightText: 2024 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
import argparse
import glob
import os
import typing as t

import __init__  # noqa: F401 # inject the system path

from dynamic_pipelines.report import BuildReportGenerator
from dynamic_pipelines.report import JobReportGenerator
from dynamic_pipelines.report import TargetTestReportGenerator
from dynamic_pipelines.utils import fetch_failed_jobs
from dynamic_pipelines.utils import parse_testcases_from_filepattern
from idf_ci.app import import_apps_from_txt


def main() -> None:
    parser: argparse.ArgumentParser = setup_argument_parser()
    args: argparse.Namespace = parser.parse_args()

    report_actions: t.Dict[str, t.Callable[[argparse.Namespace], None]] = {
        'build': generate_build_report,
        'target_test': generate_target_test_report,
        'job': generate_jobs_report,
    }

    report_action = report_actions.get(args.report_type)
    if report_action is None:
        raise ValueError('Unknown report type is requested to be generated.')

    report_action(args)


def setup_argument_parser() -> argparse.ArgumentParser:
    report_type_parser: argparse.ArgumentParser = argparse.ArgumentParser(add_help=False)
    report_type_parser.add_argument(
        '--report-type', choices=['build', 'target_test', 'job'], required=True, help='Type of report to generate'
    )
    report_type_args: argparse.Namespace
    remaining_args: t.List[str]
    report_type_args, remaining_args = report_type_parser.parse_known_args()

    parser: argparse.ArgumentParser = argparse.ArgumentParser(
        description='Update reports in MR pipelines based on the selected report type',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
        parents=[report_type_parser],
    )
    common_arguments(parser)
    conditional_arguments(report_type_args, parser)

    return parser


def common_arguments(parser: argparse.ArgumentParser) -> None:
    parser.add_argument('--project-id', type=int, default=os.getenv('CI_PROJECT_ID'), help='Project ID')
    parser.add_argument('--mr-iid', type=int, default=os.getenv('CI_MERGE_REQUEST_IID'), help='Merge Request IID')
    parser.add_argument('--pipeline-id', type=int, default=os.getenv('PARENT_PIPELINE_ID'), help='Pipeline ID')
    parser.add_argument('--job-id', type=int, default=os.getenv('CI_JOB_ID'), help='Job ID')
    parser.add_argument('--commit-id', default=os.getenv('CI_COMMIT_SHA'), help='MR commit ID')


def conditional_arguments(report_type_args: argparse.Namespace, parser: argparse.ArgumentParser) -> None:
    if report_type_args.report_type == 'build':
        parser.add_argument('--app-list-filepattern', default='list_job_*.txt', help='Pattern to match app list files')
    elif report_type_args.report_type == 'target_test':
        parser.add_argument(
            '--junit-report-filepattern', default='XUNIT_RESULT*.xml', help='Pattern to match JUnit report files'
        )


def generate_build_report(args: argparse.Namespace) -> None:
    apps: t.List[t.Any] = [
        app for file_name in glob.glob(args.app_list_filepattern) for app in import_apps_from_txt(file_name)
    ]
    report_generator = BuildReportGenerator(args.project_id, args.mr_iid, args.pipeline_id, apps=apps)
    report_generator.post_report(args.job_id, args.commit_id)


def generate_target_test_report(args: argparse.Namespace) -> None:
    test_cases: t.List[t.Any] = parse_testcases_from_filepattern(args.junit_report_filepattern)
    report_generator = TargetTestReportGenerator(args.project_id, args.mr_iid, args.pipeline_id, test_cases=test_cases)
    report_generator.post_report(args.job_id, args.commit_id)


def generate_jobs_report(args: argparse.Namespace) -> None:
    jobs: t.List[t.Any] = fetch_failed_jobs(args.commit_id)

    if not jobs:
        return

    report_generator = JobReportGenerator(args.project_id, args.mr_iid, args.pipeline_id, jobs=jobs)
    report_generator.post_report(args.job_id, args.commit_id)


if __name__ == '__main__':
    main()
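The two-stage parsing above pre-parses `--report-type` with `parse_known_args()` so that report-specific flags can be registered before the full parse; a self-contained sketch of the same pattern (argument values are illustrative only):

```python
import argparse

pre = argparse.ArgumentParser(add_help=False)
pre.add_argument('--report-type', choices=['build', 'target_test', 'job'], required=True)
# First pass: learn the report type, ignore flags we have not declared yet.
known, _rest = pre.parse_known_args(['--report-type', 'build', '--app-list-filepattern', '*.txt'])

full = argparse.ArgumentParser(parents=[pre])
if known.report_type == 'build':  # register flags only relevant to this report type
    full.add_argument('--app-list-filepattern', default='list_job_*.txt')

args = full.parse_args(['--report-type', 'build', '--app-list-filepattern', '*.txt'])
assert args.app_list_filepattern == '*.txt'
```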


@@ -1,55 +0,0 @@
# SPDX-FileCopyrightText: 2024 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
import argparse
import os

import __init__  # noqa: F401 # inject the system path

from dynamic_pipelines.report import TargetTestReportGenerator
from dynamic_pipelines.utils import parse_testcases_from_filepattern

if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description='Update Build Report in MR pipelines',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
    )
    parser.add_argument(
        '--project-id',
        type=int,
        default=os.getenv('CI_PROJECT_ID'),
        help='Project ID',
    )
    parser.add_argument(
        '--mr-iid',
        type=int,
        default=os.getenv('CI_MERGE_REQUEST_IID'),
        help='Merge Request IID',
    )
    parser.add_argument(
        '--pipeline-id',
        type=int,
        default=os.getenv('PARENT_PIPELINE_ID'),
        help='Pipeline ID',
    )
    parser.add_argument(
        '--job-id',
        type=int,
        default=os.getenv('CI_JOB_ID'),
        help='Job ID',
    )
    parser.add_argument(
        '--commit-id',
        default=os.getenv('CI_COMMIT_SHORT_SHA'),
        help='MR commit ID',
    )
    parser.add_argument(
        '--junit-report-filepattern',
        default='XUNIT_RESULT*.xml',
        help='Junit Report file pattern',
    )

    args = parser.parse_args()

    test_cases = parse_testcases_from_filepattern(args.junit_report_filepattern)

    report_generator = TargetTestReportGenerator(args.project_id, args.mr_iid, args.pipeline_id, test_cases=test_cases)
    report_generator.post_report(args.job_id, args.commit_id)


@@ -8,4 +8,4 @@ generate_pytest_report:
       - target_test_report.html
   script:
     - python tools/ci/get_known_failure_cases_file.py
-    - python tools/ci/dynamic_pipelines/scripts/generate_target_test_report.py
+    - python tools/ci/dynamic_pipelines/scripts/generate_report.py --report-type target_test


@@ -1,23 +1,57 @@
 <!doctype html>
 <html>
 <head>
   <meta charset="utf-8" />
   <title>{{title}}</title>
-  <link
-    href="https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/css/bootstrap.min.css"
-    rel="stylesheet"
-  />
-  <link
-    href="https://unpkg.com/bootstrap-table@1.22.1/dist/bootstrap-table.min.css"
-    rel="stylesheet"
-  />
-</head>
-
-<body>
+  <link href="https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/css/bootstrap.min.css" rel="stylesheet" />
+  <link href="https://unpkg.com/bootstrap-table@1.22.1/dist/bootstrap-table.min.css" rel="stylesheet" />
+  <style>
+    .text-toggle, .full-text { cursor: pointer; }
+    th:nth-child(1), td:nth-child(1) { width: 5%; }
+    th:nth-child(2), td:nth-child(2),
+    th:nth-child(3), td:nth-child(3) { width: 30%; }
+    th, td {
+      overflow: hidden;
+      text-overflow: ellipsis;
+    }
+    h2 {
+      margin-top: 10px;
+    }
+  </style>
+</head>
+<body>
   <div class="container-fluid">{{table}}</div>
   <script src="https://cdn.jsdelivr.net/npm/jquery/dist/jquery.min.js"></script>
   <script src="https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js"></script>
   <script src="https://unpkg.com/bootstrap-table@1.22.1/dist/bootstrap-table.min.js"></script>
-</body>
+  <script>
+    $(document).ready(function() {
+      $('table.table td').each(function() {
+        var cell = $(this);
+        if (cell.text().length > 100) {
+          var originalText = cell.text();
+          var displayText = originalText.substring(0, 100) + '...';
+          cell.html('<span class="text-toggle">' + displayText + '</span><span class="full-text" style="display: none;">' + originalText + '</span>');
+          cell.append('<a href="#" class="toggle-link">Show More</a>');
+        }
+      });
+      $('body').on('click', '.toggle-link', function(e) {
+        e.preventDefault();
+        var link = $(this);
+        var textSpan = link.siblings('.full-text');
+        var toggleSpan = link.siblings('.text-toggle');
+        if (textSpan.is(':visible')) {
+          link.text('Show More');
+          textSpan.hide();
+          toggleSpan.show();
+        } else {
+          link.text('Show Less');
+          textSpan.show();
+          toggleSpan.hide();
+        }
+      });
+    });
+  </script>
+</body>
 </html>


@@ -10,7 +10,7 @@ generate_pytest_build_report:
       - build_report.html
       - test_related_apps_download_urls.yml
   script:
-    - python tools/ci/dynamic_pipelines/scripts/generate_build_report.py
+    - python tools/ci/dynamic_pipelines/scripts/generate_report.py --report-type build

generate_pytest_child_pipeline:
  # finally, we can get some use out of the default behavior that downloads all artifacts from the previous stage


@@ -0,0 +1,94 @@
<!doctype html>
<html>
<head>
<meta charset="utf-8" />
<title>Job Report</title>
<link href="https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/css/bootstrap.min.css" rel="stylesheet" />
<link href="https://unpkg.com/bootstrap-table@1.22.1/dist/bootstrap-table.min.css" rel="stylesheet" />
<style>
.text-toggle, .full-text { cursor: pointer; }
th:nth-child(1), td:nth-child(1) { width: 5%; }
th:nth-child(2), td:nth-child(2),
th:nth-child(3), td:nth-child(3) { width: 30%; }
th, td {
overflow: hidden;
text-overflow: ellipsis;
}
h2 {
margin-top: 10px;
}
</style>
</head>
<body>
<div class="container-fluid"><h2>Failed Jobs (Excludes "integration_test" and "target_test" jobs)</h2><table data-toggle="table" data-search="true">
<thead>
<tr>
<th>Job Name</th>
<th>Failure Reason</th>
<th>Failure Log</th>
<th>Failures across all other branches (10 latest jobs)</th>
<th>URL</th>
<th>CI Dashboard URL</th>
</tr>
</thead>
<tbody>
<tr>
<td>build_clang_test_apps_esp32h2</td>
<td>Some Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure Reason</td>
<td>Some Failure LogSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure Reason</td>
<td>2 / 10</td>
<td><a href="https://test.com/-/jobs/48838695">link</a></td>
<td><a href="https://test_dashboard_host/d/LoUa-qLWz/job-list?var-job_name=build_clang_test_apps_esp32h2">link</a></td>
</tr>
<tr>
<td>build_template_app</td>
<td>Some Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure Reason</td>
<td>Some Failure LogSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure Reason</td>
<td>3 / 10</td>
<td><a href="https://test.com/-/jobs/48838704">link</a></td>
<td><a href="https://test_dashboard_host/d/LoUa-qLWz/job-list?var-job_name=build_template_app">link</a></td>
</tr>
<tr>
<td>check_public_headers</td>
<td>Some Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure Reason</td>
<td>Some Failure Log</td>
<td>4 / 10</td>
<td><a href="https://test.com/-/jobs/48838705">link</a></td>
<td><a href="https://test_dashboard_host/d/LoUa-qLWz/job-list?var-job_name=check_public_headers">link</a></td>
</tr>
</tbody>
</table></div>
<script src="https://cdn.jsdelivr.net/npm/jquery/dist/jquery.min.js"></script>
<script src="https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js"></script>
<script src="https://unpkg.com/bootstrap-table@1.22.1/dist/bootstrap-table.min.js"></script>
<script>
$(document).ready(function() {
$('table.table td').each(function() {
var cell = $(this);
if (cell.text().length > 100) {
var originalText = cell.text();
var displayText = originalText.substring(0, 100) + '...';
cell.html('<span class="text-toggle">' + displayText + '</span><span class="full-text" style="display: none;">' + originalText + '</span>');
cell.append('<a href="#" class="toggle-link">Show More</a>');
}
});
$('body').on('click', '.toggle-link', function(e) {
e.preventDefault();
var link = $(this);
var textSpan = link.siblings('.full-text');
var toggleSpan = link.siblings('.text-toggle');
if (textSpan.is(':visible')) {
link.text('Show More');
textSpan.hide();
toggleSpan.show();
} else {
link.text('Show Less');
textSpan.show();
toggleSpan.hide();
}
});
});
</script>
</body>
</html>


@@ -1,26 +1,32 @@
 <!doctype html>
 <html>
 <head>
   <meta charset="utf-8" />
   <title>Test Report</title>
-  <link
-    href="https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/css/bootstrap.min.css"
-    rel="stylesheet"
-  />
-  <link
-    href="https://unpkg.com/bootstrap-table@1.22.1/dist/bootstrap-table.min.css"
-    rel="stylesheet"
-  />
-</head>
-
-<body>
-  <div class="container-fluid"><h2>Failed Test Cases (Excludes Known Failure Cases)</h2><table data-toggle="table" data-search="true">
+  <link href="https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/css/bootstrap.min.css" rel="stylesheet" />
+  <link href="https://unpkg.com/bootstrap-table@1.22.1/dist/bootstrap-table.min.css" rel="stylesheet" />
+  <style>
+    .text-toggle, .full-text { cursor: pointer; }
+    th:nth-child(1), td:nth-child(1) { width: 5%; }
+    th:nth-child(2), td:nth-child(2),
+    th:nth-child(3), td:nth-child(3) { width: 30%; }
+    th, td {
+      overflow: hidden;
+      text-overflow: ellipsis;
+    }
+    h2 {
+      margin-top: 10px;
+    }
+  </style>
+</head>
+<body>
+  <div class="container-fluid"><h2>Failed Test Cases on Your branch (Excludes Known Failure Cases)</h2><table data-toggle="table" data-search="true">
   <thead>
     <tr>
       <th>Test Case</th>
       <th>Test Script File Path</th>
       <th>Failure Reason</th>
+      <th>Failures across all other branches (20 latest testcases)</th>
       <th>Job URL</th>
       <th>Grafana URL</th>
     </tr>
@@ -30,20 +36,68 @@
       <td>('esp32h2', 'esp32h2').('defaults', 'defaults').test_i2c_multi_device</td>
       <td>components/driver/test_apps/i2c_test_apps/pytest_i2c.py</td>
       <td>failed on setup with "EOFError"</td>
+      <td>0 / 20</td>
       <td></td>
       <td><a href="https://test_dashboard_host/d/Ucg477Fnz/case-list?var-case_id=%28%27esp32h2%27%2C%20%27esp32h2%27%29.%28%27defaults%27%2C%20%27defaults%27%29.test_i2c_multi_device">link</a></td>
     </tr>
<tr>
<td>esp32c3.release.test_esp_timer</td>
<td>components/esp_timer/test_apps/pytest_esp_timer_ut.py</td>
<td>pexpect.exceptions.TIMEOUT: Not found "Press ENTER to see the list of tests" Bytes in current buffer (color code eliminated): ce710,len:0x2afc entry 0x403cc710 Please check the full log here: /builds/espressif/esp-idf/pytest_embedded/2024-05-17_17-50-04/esp32c3.release.test_esp_timer/dut.txt</td>
<td>0 / 20</td>
<td></td>
<td><a href="https://test_dashboard_host/d/Ucg477Fnz/case-list?var-case_id=esp32c3.release.test_esp_timer">link</a></td>
</tr>
<tr>
<td>esp32c3.default.test_wpa_supplicant_ut</td>
<td>components/wpa_supplicant/test_apps/pytest_wpa_supplicant_ut.py</td>
<td>pexpect.exceptions.TIMEOUT: Not found "Press ENTER to see the list of tests" Bytes in current buffer (color code eliminated): 0 d4 000 00x0000 x0000x00 000000 0 Please check the full log here: /builds/espressif/esp-idf/pytest_embedded/2024-05-17_17-50-04/esp32c3.default.test_wpa_supplicant_ut/dut.txt</td>
<td>0 / 20</td>
<td></td>
<td><a href="https://test_dashboard_host/d/Ucg477Fnz/case-list?var-case_id=esp32c3.default.test_wpa_supplicant_ut">link</a></td>
</tr>
</tbody>
</table><h2>Failed Test Cases on Other branches (Excludes Known Failure Cases)</h2><table data-toggle="table" data-search="true">
<thead>
<tr>
<th>Test Case</th>
<th>Test Script File Path</th>
<th>Failure Reason</th>
<th>Failures across all other branches (20 latest testcases)</th>
<th>Job URL</th>
<th>Grafana URL</th>
</tr>
</thead>
<tbody>
     <tr>
       <td>('esp32h2', 'esp32h2').('default', 'default').test_i2s_multi_dev</td>
       <td>components/driver/test_apps/i2s_test_apps/i2s_multi_dev/pytest_i2s_multi_dev.py</td>
       <td>failed on setup with "EOFError"</td>
+      <td>3 / 20</td>
       <td></td>
       <td><a href="https://test_dashboard_host/d/Ucg477Fnz/case-list?var-case_id=%28%27esp32h2%27%2C%20%27esp32h2%27%29.%28%27default%27%2C%20%27default%27%29.test_i2s_multi_dev">link</a></td>
     </tr>
<tr>
<td>esp32c2.default.test_wpa_supplicant_ut</td>
<td>components/wpa_supplicant/test_apps/pytest_wpa_supplicant_ut.py</td>
<td>AssertionError: Unity test failed</td>
<td>3 / 20</td>
<td></td>
<td><a href="https://test_dashboard_host/d/Ucg477Fnz/case-list?var-case_id=esp32c2.default.test_wpa_supplicant_ut">link</a></td>
</tr>
<tr>
<td>esp32c3.512safe.test_wear_levelling</td>
<td>components/wear_levelling/test_apps/pytest_wear_levelling.py</td>
<td>pexpect.exceptions.TIMEOUT: Not found "re.compile(b'^[-]+\\s*(\\d+) Tests (\\d+) Failures (\\d+) Ignored\\s*(?P<result>OK|FAIL)', re.MULTILINE)" Bytes in current buffer (color code eliminated): Serial port /dev/ttyUSB16 Connecting.... Connecting.... esptool.py v4.7.0 Found 1 serial ports Chip is ESP32-C3 (QFN32) (revision v0.3) Features: WiFi, BLE, Embedded Flash 4MB... (total 6673 bytes) Please check the full log here: /builds/espressif/esp-idf/pytest_embedded/2024-05-17_17-50-04/esp32c3.512safe.test_wear_levelling/dut.txt</td>
<td>3 / 20</td>
<td></td>
<td><a href="https://test_dashboard_host/d/Ucg477Fnz/case-list?var-case_id=esp32c3.512safe.test_wear_levelling">link</a></td>
</tr>
     <tr>
       <td>esp32c3.release.test_wear_levelling</td>
       <td>components/wear_levelling/test_apps/pytest_wear_levelling.py</td>
       <td>pexpect.exceptions.TIMEOUT: Not found "re.compile(b'^[-]+\\s*(\\d+) Tests (\\d+) Failures (\\d+) Ignored\\s*(?P<result>OK|FAIL)', re.MULTILINE)" Bytes in current buffer (color code eliminated): Serial port /dev/ttyUSB16 Connecting.... Connecting.... esptool.py v4.7.0 Found 1 serial ports Chip is ESP32-C3 (QFN32) (revision v0.3) Features: WiFi, BLE, Embedded Flash 4MB... (total 24528 bytes) Please check the full log here: /builds/espressif/esp-idf/pytest_embedded/2024-05-17_17-50-04/esp32c3.release.test_wear_levelling/dut.txt</td>
+      <td>3 / 20</td>
       <td></td>
       <td><a href="https://test_dashboard_host/d/Ucg477Fnz/case-list?var-case_id=esp32c3.release.test_wear_levelling">link</a></td>
     </tr>
@@ -174,5 +228,34 @@
   <script src="https://cdn.jsdelivr.net/npm/jquery/dist/jquery.min.js"></script>
   <script src="https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js"></script>
   <script src="https://unpkg.com/bootstrap-table@1.22.1/dist/bootstrap-table.min.js"></script>
-</body>
+  <script>
+    $(document).ready(function() {
+      $('table.table td').each(function() {
+        var cell = $(this);
+        if (cell.text().length > 100) {
+          var originalText = cell.text();
+          var displayText = originalText.substring(0, 100) + '...';
+          cell.html('<span class="text-toggle">' + displayText + '</span><span class="full-text" style="display: none;">' + originalText + '</span>');
+          cell.append('<a href="#" class="toggle-link">Show More</a>');
+        }
+      });
+      $('body').on('click', '.toggle-link', function(e) {
+        e.preventDefault();
+        var link = $(this);
+        var textSpan = link.siblings('.full-text');
+        var toggleSpan = link.siblings('.text-toggle');
+        if (textSpan.is(':visible')) {
+          link.text('Show More');
+          textSpan.hide();
+          toggleSpan.show();
+        } else {
+          link.text('Show Less');
+          textSpan.show();
+          toggleSpan.hide();
+        }
+      });
+    });
+  </script>
+</body>
 </html>


@@ -0,0 +1,7 @@
{
"jobs": [
{"failed_count": 2, "failure_ratio": 0.2, "total_count": 10, "name": "build_clang_test_apps_esp32h2"},
{"failed_count": 3, "failure_ratio": 0.3, "total_count": 10, "name": "build_template_app"},
{"failed_count": 4, "failure_ratio": 0.4, "total_count": 10, "name": "check_public_headers"}
]
}
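In this fixture, `failure_ratio` is simply `failed_count / total_count` (2 / 10 = 0.2, and so on); a quick consistency check, assuming the `load_file` helper added in utils below and that the script runs from the fixtures directory:

```python
import json

# Hypothetical check against the fixture above; path is relative to the fixtures dir.
data = json.loads(load_file('failure_rate_jobs_response.json'))
for job in data['jobs']:
    assert job['failure_ratio'] == job['failed_count'] / job['total_count']
```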


@@ -0,0 +1,212 @@
{
"jobs": [
{
"duration_sec": 42.158688,
"failure_log": null,
"failure_reason": null,
"finished_at": "03:38:16, 24 May 2024",
"id": 48838677,
"name": "check_pre_commit",
"pending_sec": 1.15148,
"runner_name": "FA002598-build",
"stage": "pre_check",
"status": "success",
"url": "https://test.com/-/jobs/48838677"
},
{
"duration_sec": 35.438477,
"failure_log": null,
"failure_reason": null,
"finished_at": "03:37:32, 24 May 2024",
"id": 48838675,
"name": "run-danger-mr-linter",
"pending_sec": 0.371668,
"runner_name": "BrnoVM0211",
"stage": "pre_check",
"status": "success",
"url": "https://test.com/-/jobs/48838675"
},
{
"duration_sec": 30.202475,
"failure_log": null,
"failure_reason": null,
"finished_at": "03:37:28, 24 May 2024",
"id": 48838682,
"name": "check_esp_system",
"pending_sec": 1.148756,
"runner_name": "ruby6-cent9 [32]",
"stage": "pre_check",
"status": "success",
"url": "https://test.com/-/jobs/48838682"
},
{
"duration_sec": 33.75121,
"failure_log": null,
"failure_reason": null,
"finished_at": "03:37:31, 24 May 2024",
"id": 48838679,
"name": "check_blobs",
"pending_sec": 0.725292,
"runner_name": "gem3-cent9 [32]",
"stage": "pre_check",
"status": "success",
"url": "https://test.com/-/jobs/48838679"
},
{
"duration_sec": 121.84324,
"failure_log": null,
"failure_reason": null,
"finished_at": "03:41:35, 24 May 2024",
"id": 48838687,
"name": "code_quality_check",
"pending_sec": 0.271973,
"runner_name": "ruby15-cent9 [32]",
"stage": "build",
"status": "success",
"url": "https://test.com/-/jobs/48838687"
},
{
"duration_sec": 153.68849,
"failure_log": null,
"failure_reason": null,
"finished_at": "03:39:32, 24 May 2024",
"id": 48838686,
"name": "fast_template_app",
"pending_sec": 2.319577,
"runner_name": "FA002598-build",
"stage": "pre_check",
"status": "success",
"url": "https://test.com/-/jobs/48838686"
},
{
"duration_sec": 25.572954,
"failure_log": null,
"failure_reason": null,
"finished_at": "03:37:23, 24 May 2024",
"id": 48838684,
"name": "check_configure_ci_environment_parsing",
"pending_sec": 1.184287,
"runner_name": "gem3-cent9 [32]",
"stage": "pre_check",
"status": "success",
"url": "https://test.com/-/jobs/48838684"
},
{
"duration_sec": 120.95287,
"failure_log": null,
"failure_reason": null,
"finished_at": "03:41:34, 24 May 2024",
"id": 48838690,
"name": "build_clang_test_apps_esp32s3",
"pending_sec": 0.671956,
"runner_name": "ruby7-cent9 [32]",
"stage": "build",
"status": "success",
"url": "https://test.com/-/jobs/48838690"
},
{
"duration_sec": 165.74513,
"failure_log": null,
"failure_reason": null,
"finished_at": "03:42:19, 24 May 2024",
"id": 48838692,
"name": "build_clang_test_apps_esp32c2",
"pending_sec": 0.82007,
"runner_name": "PowerfulBuildRunner03 [16]",
"stage": "build",
"status": "success",
"url": "https://test.com/-/jobs/48838692"
},
{
"duration_sec": 95.72326,
"failure_log": null,
"failure_reason": null,
"finished_at": "03:41:09, 24 May 2024",
"id": 48838696,
"name": "build_clang_test_apps_esp32p4",
"pending_sec": 0.567116,
"runner_name": "gem3-cent9 [32]",
"stage": "build",
"status": "success",
"url": "https://test.com/-/jobs/48838696"
},
{
"duration_sec": 122.19848,
"failure_log": null,
"failure_reason": null,
"finished_at": "03:41:36, 24 May 2024",
"id": 48838691,
"name": "build_clang_test_apps_esp32c3",
"pending_sec": 0.709112,
"runner_name": "ruby6-cent9 [32]",
"stage": "build",
"status": "success",
"url": "https://test.com/-/jobs/48838691"
},
{
"duration_sec": 148.09895,
"failure_log": null,
"failure_reason": null,
"finished_at": "03:42:02, 24 May 2024",
"id": 48838694,
"name": "build_clang_test_apps_esp32c5",
"pending_sec": 0.779584,
"runner_name": "PowerfulBuildRunner04 [15]",
"stage": "build",
"status": "success",
"url": "https://test.com/-/jobs/48838694"
},
{
"duration_sec": 20.275927,
"failure_log": null,
"failure_reason": null,
"finished_at": "03:39:54, 24 May 2024",
"id": 48838699,
"name": "gen_integration_pipeline",
"pending_sec": 0.868898,
"runner_name": "FA002598-build",
"stage": "assign_test",
"status": "success",
"url": "https://test.com/-/jobs/48838699"
},
{
"duration_sec": 103.08849,
"failure_log": "Some Failure LogSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure Reason",
"failure_reason": "Some Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure Reason",
"finished_at": "03:41:17, 24 May 2024",
"id": 48838695,
"name": "build_clang_test_apps_esp32h2",
"pending_sec": 0.765111,
"runner_name": "gem2-cent9 [32]",
"stage": "build",
"status": "failed",
"url": "https://test.com/-/jobs/48838695"
},
{
"duration_sec": 634.59467,
"failure_log": "Some Failure LogSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure Reason",
"failure_reason": "Some Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure Reason",
"finished_at": "03:50:09, 24 May 2024",
"id": 48838704,
"name": "build_template_app",
"pending_sec": 0.161796,
"runner_name": "ruby6-cent9 [32]",
"stage": "host_test",
"status": "failed",
"url": "https://test.com/-/jobs/48838704"
},
{
"duration_sec": 1060.0835,
"failure_log": "Some Failure Log",
"failure_reason": "Some Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure ReasonSome Failure Reason",
"finished_at": "03:55:14, 24 May 2024",
"id": 48838705,
"name": "check_public_headers",
"pending_sec": 0.449408,
"runner_name": "ruby6-cent9 [32]",
"stage": "host_test",
"status": "failed",
"url": "https://test.com/-/jobs/48838705"
}
]
}


@@ -0,0 +1,127 @@
#!/usr/bin/env python
# SPDX-FileCopyrightText: 2024 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
import json
import os.path
import sys
import unittest
from unittest.mock import MagicMock
from unittest.mock import patch

sys.path.insert(0, os.path.join(f'{os.environ.get("IDF_PATH")}', 'tools', 'ci', 'python_packages'))
sys.path.insert(0, os.path.join(f'{os.environ.get("IDF_PATH")}', 'tools', 'ci'))

from dynamic_pipelines.models import GitlabJob  # noqa: E402
from dynamic_pipelines.report import JobReportGenerator, TargetTestReportGenerator  # noqa: E402
from dynamic_pipelines.utils import load_file, parse_testcases_from_filepattern  # noqa: E402


class TestReportGeneration(unittest.TestCase):
    def setUp(self) -> None:
        self.reports_sample_data_path = os.path.join(
            os.environ.get('IDF_PATH', ''),
            'tools',
            'ci',
            'dynamic_pipelines',
            'tests',
            'test_report_generator',
            'reports_sample_data',
        )
        self.setup_patches()
        self.load_test_and_job_reports()
        self.create_report_generators()

    def setup_patches(self) -> None:
        self.gitlab_patcher = patch('dynamic_pipelines.report.Gitlab')
        self.failure_rate_patcher = patch('dynamic_pipelines.report.fetch_failed_testcases_failure_ratio')
        self.env_patcher = patch.dict('os.environ', {
            'CI_DASHBOARD_HOST': 'https://test_dashboard_host',
            'CI_PAGES_URL': 'https://artifacts_path',
            'CI_JOB_ID': '1',
        })

        self.MockGitlab = self.gitlab_patcher.start()
        self.test_cases_failure_rate = self.failure_rate_patcher.start()
        self.env_patcher.start()

        self.mock_project = MagicMock()
        self.mock_mr = MagicMock()
        self.MockGitlab.return_value.project = self.mock_project
        self.mock_project.mergerequests.get.return_value = self.mock_mr

        self.addCleanup(self.gitlab_patcher.stop)
        self.addCleanup(self.env_patcher.stop)
        self.addCleanup(self.failure_rate_patcher.stop)

    def load_test_and_job_reports(self) -> None:
        self.expected_target_test_report_html = load_file(
            os.path.join(self.reports_sample_data_path, 'expected_target_test_report.html')
        )
        self.expected_job_report_html = load_file(
            os.path.join(self.reports_sample_data_path, 'expected_job_report.html')
        )

    def create_report_generators(self) -> None:
        jobs_response_raw = load_file(os.path.join(self.reports_sample_data_path, 'jobs_api_response.json'))
        failure_rate_jobs_response = load_file(
            os.path.join(self.reports_sample_data_path, 'failure_rate_jobs_response.json')
        )
        failure_rates = {item['name']: item for item in json.loads(failure_rate_jobs_response).get('jobs', [])}
        jobs = [
            GitlabJob.from_json_data(job_json, failure_rates.get(job_json['name'], {}))
            for job_json in json.loads(jobs_response_raw)['jobs']
        ]
        test_cases = parse_testcases_from_filepattern(os.path.join(self.reports_sample_data_path, 'XUNIT_*.xml'))

        self.target_test_report_generator = TargetTestReportGenerator(
            project_id=123, mr_iid=1, pipeline_id=456, title='Test Report', test_cases=test_cases
        )
        self.job_report_generator = JobReportGenerator(
            project_id=123, mr_iid=1, pipeline_id=456, title='Job Report', jobs=jobs
        )
        self.target_test_report_generator._known_failure_cases_set = {
            '*.test_wpa_supplicant_ut',
            'esp32c3.release.test_esp_timer',
            '*.512safe.test_wear_levelling',
        }

        test_cases_failed = [tc for tc in test_cases if tc.is_failure]
        for index, tc in enumerate(test_cases_failed):
            tc.latest_total_count = 20
            if index % 3 == 0:
                tc.latest_failed_count = 0
            else:
                tc.latest_failed_count = 3
        self.test_cases_failure_rate.return_value = test_cases_failed

    def test_known_failure_cases(self) -> None:
        known_failure_cases = self.target_test_report_generator.get_known_failure_cases()
        self.assertEqual(len(known_failure_cases), 4)

    def test_failed_cases_in_target_test_report(self) -> None:
        known_failures = self.target_test_report_generator.get_known_failure_cases()
        known_failure_case_names = {case.name for case in known_failures}
        failed_testcases = self.target_test_report_generator._filter_items(
            self.target_test_report_generator.test_cases,
            lambda tc: tc.is_failure and tc.name not in known_failure_case_names,
        )
        self.assertEqual(len(failed_testcases), 3)

    def test_skipped_cases_in_target_test_report(self) -> None:
        skipped_testcases = self.target_test_report_generator._filter_items(
            self.target_test_report_generator.test_cases, lambda tc: tc.is_skipped
        )
        self.assertEqual(len(skipped_testcases), 1)

    def test_successful_cases_in_target_test_report(self) -> None:
        succeeded_testcases = self.target_test_report_generator._filter_items(
            self.target_test_report_generator.test_cases, lambda tc: tc.is_success
        )
        self.assertEqual(len(succeeded_testcases), 9)

    def test_target_test_report_html_structure(self) -> None:
        report = self.target_test_report_generator._get_report_str()
        self.assertEqual(report, self.expected_target_test_report_html)

    def test_failed_jobs_in_job_report(self) -> None:
        failed_jobs = self.job_report_generator._filter_items(
            self.job_report_generator.jobs, lambda job: job.is_failed
        )
        self.assertEqual(len(failed_jobs), 3)

    def test_successful_jobs_in_job_report(self) -> None:
        succeeded_jobs = self.job_report_generator._filter_items(
            self.job_report_generator.jobs, lambda job: job.is_success
        )
        self.assertEqual(len(succeeded_jobs), 13)

    def test_job_report_html_structure(self) -> None:
        report = self.job_report_generator._get_report_str()
        self.assertEqual(report, self.expected_job_report_html)


if __name__ == '__main__':
    unittest.main()


@@ -1,96 +0,0 @@
#!/usr/bin/env python
# SPDX-FileCopyrightText: 2024 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
import os.path
import sys
import unittest
from unittest.mock import MagicMock
from unittest.mock import patch

sys.path.insert(0, os.path.join(f'{os.environ.get("IDF_PATH")}', 'tools', 'ci', 'python_packages'))
sys.path.insert(0, os.path.join(f'{os.environ.get("IDF_PATH")}', 'tools', 'ci'))

from dynamic_pipelines.report import TargetTestReportGenerator  # noqa: E402
from dynamic_pipelines.utils import parse_testcases_from_filepattern  # noqa: E402


class TestReportGeneration(unittest.TestCase):
    @classmethod
    def load_expected_report(cls, file_path: str) -> str:
        """
        Loads the content of an expected report HTML file.

        :param file_path: The path to the file containing the expected HTML report.
        :return: The content of the file as a string.
        """
        with open(file_path, 'r') as file:
            return file.read()

    def setUp(self) -> None:
        patcher = patch('dynamic_pipelines.report.Gitlab')
        env_patcher = patch.dict('os.environ', {
            'CI_DASHBOARD_HOST': 'https://test_dashboard_host',
            'CI_PAGES_URL': 'https://artifacts_path',
            'CI_JOB_ID': '1',
        })
        env_patcher.start()
        self.MockGitlab = patcher.start()
        self.addCleanup(patcher.stop)
        self.addCleanup(env_patcher.stop)

        self.reports_sample_data_path = os.path.join(
            os.environ.get('IDF_PATH', ''),  # type: ignore
            'tools',
            'ci',
            'dynamic_pipelines',
            'tests',
            'test_report_generator',
            'reports_sample_data'
        )
        self.mock_project = MagicMock()
        self.mock_mr = MagicMock()
        self.MockGitlab.return_value.project = self.mock_project
        self.mock_project.mergerequests.get.return_value = self.mock_mr
        self.expected_report_html = self.load_expected_report(
            os.path.join(self.reports_sample_data_path, 'expected_target_test_report.html')
        )

        test_cases = parse_testcases_from_filepattern(os.path.join(self.reports_sample_data_path, 'XUNIT_*.xml'))
        self.report_generator = TargetTestReportGenerator(
            project_id=123, mr_iid=1, pipeline_id=456, title='Test Report', test_cases=test_cases
        )
        self.report_generator._known_failure_cases_set = {
            '*.test_wpa_supplicant_ut',
            'esp32c3.release.test_esp_timer',
            '*.512safe.test_wear_levelling',
        }

    def test_known_failure_cases(self) -> None:
        known_failure_cases = self.report_generator.get_known_failure_cases()
        self.assertEqual(len(known_failure_cases), 4)

    def test_failed_cases_in_report(self) -> None:
        known_failures = self.report_generator.get_known_failure_cases()
        known_failure_case_names = {case.name for case in known_failures}
        failed_testcases = self.report_generator._filter_test_cases(
            lambda tc: tc.is_failure and tc.name not in known_failure_case_names
        )
        self.assertEqual(len(failed_testcases), 3)

    def test_skipped_cases_in_report(self) -> None:
        skipped_testcases = self.report_generator._filter_test_cases(lambda tc: tc.is_skipped)
        self.assertEqual(len(skipped_testcases), 1)

    def test_successful_cases_in_report(self) -> None:
        succeeded_testcases = self.report_generator._filter_test_cases(lambda tc: tc.is_success)
        self.assertEqual(len(succeeded_testcases), 9)

    def test_complete_html_structure(self) -> None:
        report = self.report_generator._get_report_str()
        self.assertEqual(report, self.expected_report_html)


if __name__ == '__main__':
    unittest.main()


@@ -7,8 +7,10 @@ import typing as t
 import xml.etree.ElementTree as ET
 from urllib.parse import urlparse

+import requests
 import yaml

+from .models import GitlabJob
 from .models import Job
 from .models import TestCase
@@ -81,3 +83,86 @@ def is_url(string: str) -> bool:
     """
     parsed = urlparse(string)
     return bool(parsed.scheme) and bool(parsed.netloc)
def fetch_failed_jobs(commit_id: str) -> t.List[GitlabJob]:
    """
    Fetches the list of jobs for the specified commit_id using an API request to ci-dashboard-api.

    :param commit_id: The commit ID for which to fetch jobs.
    :return: A list of jobs if the request is successful, otherwise an empty list.
    """
    token = os.getenv('ESPCI_TOKEN', '')
    ci_dash_api_backend_host = os.getenv('CI_DASHBOARD_API', '')
    response = requests.get(
        f'{ci_dash_api_backend_host}/commits/{commit_id}/jobs',
        headers={'Authorization': f'Bearer {token}'}
    )
    if response.status_code != 200:
        print(f'Failed to fetch jobs data: {response.status_code} with error: {response.text}')
        return []

    data = response.json()
    jobs = data.get('jobs', [])
    if not jobs:
        return []

    failed_job_names = [job['name'] for job in jobs if job['status'] == 'failed']
    response = requests.post(
        f'{ci_dash_api_backend_host}/jobs/failure_ratio',
        headers={'Authorization': f'Bearer {token}'},
        json={'job_names': failed_job_names, 'exclude_branches': [os.getenv('CI_COMMIT_BRANCH', '')]},
    )
    if response.status_code != 200:
        print(f'Failed to fetch jobs failure rate data: {response.status_code} with error: {response.text}')
        return []

    failure_rate_data = response.json()
    failure_rates = {item['name']: item for item in failure_rate_data.get('jobs', [])}

    combined_jobs = []
    for job in jobs:
        failure_data = failure_rates.get(job['name'], {})
        combined_jobs.append(GitlabJob.from_json_data(job, failure_data))

    return combined_jobs


def fetch_failed_testcases_failure_ratio(failed_testcases: t.List[TestCase]) -> t.List[TestCase]:
    """
    Fetches failure-rate info for testcases using an API request to ci-dashboard-api.

    :param failed_testcases: The list of failed testcase models.
    :return: The list of testcases, enriched with failure rate data.
    """
    token = os.getenv('ESPCI_TOKEN', '')
    ci_dash_api_backend_host = os.getenv('CI_DASHBOARD_API', '')
    response = requests.post(
        f'{ci_dash_api_backend_host}/testcases/failure_ratio',
        headers={'Authorization': f'Bearer {token}'},
        json={
            'testcase_names': [testcase.name for testcase in failed_testcases],
            'exclude_branches': [os.getenv('CI_COMMIT_BRANCH', '')],
        },
    )
    if response.status_code != 200:
        print(f'Failed to fetch testcases failure rate data: {response.status_code} with error: {response.text}')
        return []

    failure_rate_data = response.json()
    failure_rates = {item['name']: item for item in failure_rate_data.get('testcases', [])}
    for testcase in failed_testcases:
        testcase.latest_total_count = failure_rates.get(testcase.name, {}).get('total_count', 0)
        testcase.latest_failed_count = failure_rates.get(testcase.name, {}).get('failed_count', 0)

    return failed_testcases


def load_file(file_path: str) -> str:
    """
    Loads the content of a file.

    :param file_path: The path to the file to be loaded.
    :return: The content of the file as a string.
    """
    with open(file_path, 'r') as file:
        return file.read()
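A hedged sketch of the enrichment contract of `fetch_failed_testcases_failure_ratio`, exercised against a mocked `requests.post` (the `SimpleNamespace` stands in for a failed `TestCase`, and the response shape follows the code above):

```python
from types import SimpleNamespace
from unittest.mock import MagicMock, patch

from dynamic_pipelines.utils import fetch_failed_testcases_failure_ratio

# Stand-in exposing only the attributes the helper reads and writes.
tc = SimpleNamespace(name='esp32.test_demo', latest_total_count=0, latest_failed_count=0)

fake_response = MagicMock(status_code=200)
fake_response.json.return_value = {
    'testcases': [{'name': 'esp32.test_demo', 'failed_count': 3, 'total_count': 20}]
}

with patch('requests.post', return_value=fake_response):
    enriched = fetch_failed_testcases_failure_ratio([tc])

assert (enriched[0].latest_failed_count, enriched[0].latest_total_count) == (3, 20)
```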