Mirror of https://github.com/espressif/esp-idf.git (synced 2024-10-05 20:47:46 -04:00)
ci: add dut_log_url column to failed testcases report
Introduced changes:
- add the "dut_log_url" XML attribute to the pytest report
- add a "dut_log_url" column to the failed testcases table of the dynamic pipeline report
- make the table header sticky
- add permalinks to the table titles
- split the target test report by testcase type for better clarity
- fix the logic of finding the testcases failed on the current branch / other branches
This commit is contained in:
parent 7d3ac1abe4
commit cd59d96ff4
@@ -3,6 +3,7 @@ generate_failed_jobs_report:
   tags: [build, shiny]
   image: $ESP_ENV_IMAGE
   when: always
+  dependencies: []  # Do not download artifacts from the previous stages
   artifacts:
     expire_in: 1 week
     when: always
conftest.py (28 lines changed)
@@ -252,6 +252,34 @@ def set_test_case_name(request: FixtureRequest, test_case_name: str) -> None:
     request.node.funcargs['test_case_name'] = test_case_name
 
 
+@pytest.fixture(autouse=True)
+def set_dut_log_url(record_xml_attribute: t.Callable[[str, object], None], _pexpect_logfile: str) -> t.Generator:
+    # Record the "dut_log_url" attribute in the XML report once test execution finished
+    yield
+
+    if not isinstance(_pexpect_logfile, str):
+        record_xml_attribute('dut_log_url', 'No log URL found')
+        return
+
+    ci_pages_url = os.getenv('CI_PAGES_URL')
+    logdir_pattern = re.compile(rf'({DEFAULT_LOGDIR}/.*)')
+    match = logdir_pattern.search(_pexpect_logfile)
+
+    if not match:
+        record_xml_attribute('dut_log_url', 'No log URL found')
+        return
+
+    if not ci_pages_url:
+        record_xml_attribute('dut_log_url', _pexpect_logfile)
+        return
+
+    job_id = os.getenv('CI_JOB_ID', '0')
+    modified_ci_pages_url = ci_pages_url.replace('esp-idf', '-/esp-idf')
+    log_url = f'{modified_ci_pages_url}/-/jobs/{job_id}/artifacts/{match.group(1)}'
+
+    record_xml_attribute('dut_log_url', log_url)
+
+
 ######################
 # Log Util Functions #
 ######################
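The fixture above only records a browsable URL when the pexpect log path contains the pytest-embedded log directory and CI_PAGES_URL is set. A minimal standalone sketch of the same URL construction, with assumed values for DEFAULT_LOGDIR, CI_PAGES_URL and CI_JOB_ID (none of these values come from this commit), looks like this:

    import re

    DEFAULT_LOGDIR = 'pytest-embedded'                  # assumed value, for illustration only
    ci_pages_url = 'https://pages.example.com/esp-idf'  # hypothetical CI_PAGES_URL
    job_id = '123456'                                   # hypothetical CI_JOB_ID
    pexpect_logfile = '/builds/espressif/esp-idf/pytest-embedded/2024-07-01_10-53-05/esp32.default.test_demo/dut.txt'

    match = re.search(rf'({DEFAULT_LOGDIR}/.*)', pexpect_logfile)
    if match:
        # 'esp-idf' -> '-/esp-idf' compensates for the `-` that CI_PAGES_URL is missing
        log_url = f"{ci_pages_url.replace('esp-idf', '-/esp-idf')}/-/jobs/{job_id}/artifacts/{match.group(1)}"
        print(log_url)
        # https://pages.example.com/-/esp-idf/-/jobs/123456/artifacts/pytest-embedded/2024-07-01_10-53-05/esp32.default.test_demo/dut.txt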
@@ -166,6 +166,7 @@ class TestCase:
             'time': float(node.attrib.get('time') or 0),
             'ci_job_url': node.attrib.get('ci_job_url') or '',
             'ci_dashboard_url': f'{grafana_base_url}?{encoded_params}',
+            'dut_log_url': node.attrib.get('dut_log_url') or 'Not found',
         }
 
         failure_node = node.find('failure')
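With the attribute written by the conftest fixture, the TestCase model can read it straight off the junit XML node; the `or 'Not found'` fallback covers reports produced before this change. A tiny self-contained illustration with xml.etree (the element here is hypothetical, not taken from the fixture files below):

    import xml.etree.ElementTree as ET

    node = ET.fromstring(
        '<testcase name="esp32c2.default.test_vfs_default" time="30.044" '
        'dut_log_url="https://pages.example.com/-/esp-idf/-/jobs/123456/artifacts/pytest-embedded/dut.txt"/>'
    )
    # same fallback as in the hunk above
    print(node.attrib.get('dut_log_url') or 'Not found')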
@@ -1,11 +1,13 @@
 # SPDX-FileCopyrightText: 2024 Espressif Systems (Shanghai) CO LTD
 # SPDX-License-Identifier: Apache-2.0
 import abc
+import copy
 import fnmatch
 import html
 import os
 import re
 import typing as t
+from textwrap import dedent
 
 import yaml
 from artifacts_handler import ArtifactType
@@ -21,20 +23,24 @@ from .constants import TEST_RELATED_APPS_DOWNLOAD_URLS_FILENAME
 from .models import GitlabJob
 from .models import TestCase
 from .utils import fetch_failed_testcases_failure_ratio
+from .utils import format_permalink
+from .utils import get_report_url
 from .utils import is_url
 from .utils import load_known_failure_cases
 
 
 class ReportGenerator:
-    REGEX_PATTERN = '#### {}[^####]+'
+    REGEX_PATTERN = r'#### {}\n[\s\S]*?(?=\n#### |$)'
 
-    def __init__(self, project_id: int, mr_iid: int, pipeline_id: int, *, title: str):
+    def __init__(self, project_id: int, mr_iid: int, pipeline_id: int, job_id: int, commit_id: str, *, title: str):
         gl_project = Gitlab(project_id).project
         if mr_iid is not None:
             self.mr = gl_project.mergerequests.get(mr_iid)
         else:
             self.mr = None
         self.pipeline_id = pipeline_id
+        self.job_id = job_id
+        self.commit_id = commit_id
 
         self.title = title
         self.output_filepath = self.title.lower().replace(' ', '_') + '.html'
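The REGEX_PATTERN change is worth a note: the old pattern '#### {}[^####]+' used a character class, so it stopped at the first '#' anywhere in the section body; the new raw-string pattern captures everything from the '#### {title}' heading up to the next '#### ' heading or the end of the comment, which is what lets a single section of an existing MR comment be replaced in place. A small demonstration, assuming the class formats the pattern with the report title (the section contents below are made up):

    import re

    REGEX_PATTERN = r'#### {}\n[\s\S]*?(?=\n#### |$)'  # new pattern from this diff

    comment = (
        '#### Build Report\n'
        '- **Compiled Apps:** 12\n'
        '\n'
        '#### Target Test Report\n'
        '- **Succeeded Test Cases:** 3\n'
    )
    pattern = re.compile(REGEX_PATTERN.format('Build Report'))
    updated = pattern.sub('#### Build Report\n- **Compiled Apps:** 15\n', comment)
    print(updated)  # only the Build Report section changes; the Target Test Report section is untouched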
@@ -47,10 +53,30 @@ class ReportGenerator:
 
         return ''
 
+    def write_report_to_file(self, report_str: str, job_id: int, output_filepath: str) -> t.Optional[str]:
+        """
+        Writes the report to a file and constructs a modified URL based on environment settings.
+
+        :param report_str: The report content to be written to the file.
+        :param job_id: The job identifier used to construct the URL.
+        :param output_filepath: The path to the output file.
+        :return: The modified URL pointing to the job's artifacts.
+        """
+        if not report_str:
+            return None
+        with open(output_filepath, 'w') as file:
+            file.write(report_str)
+
+        # for example, {URL}/-/esp-idf/-/jobs/{id}/artifacts/list_job_84.txt
+        # CI_PAGES_URL is {URL}/esp-idf, which missed one `-`
+        report_url: str = get_report_url(job_id, output_filepath)
+        return report_url
+
     def generate_html_report(self, table_str: str) -> str:
         # we're using bootstrap table
-        table_str = table_str.replace('<table>', '<table data-toggle="table" data-search="true">')
+        table_str = table_str.replace(
+            '<table>', '<table data-toggle="table" data-search="true" data-sticky-header="true">'
+        )
         with open(REPORT_TEMPLATE_FILEPATH) as fr:
             template = fr.read()
 
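get_report_url() itself lives in .utils and is not part of this diff; judging from the comment above and the code it replaces in post_report() further down, it plausibly reduces to something like the following (a sketch under that assumption, not the actual helper):

    import os

    def get_report_url(job_id: int, output_filepath: str) -> str:
        # CI_PAGES_URL is {URL}/esp-idf, while the artifacts browser lives under {URL}/-/esp-idf
        url = os.getenv('CI_PAGES_URL', '').replace('esp-idf', '-/esp-idf')
        return f'{url}/-/jobs/{job_id}/artifacts/{output_filepath}'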
@@ -62,19 +88,16 @@ class ReportGenerator:
 
     def create_table_section(
         self,
-        report_sections: list,
         title: str,
         items: list,
         headers: list,
         row_attrs: list,
         value_functions: t.Optional[list] = None,
-    ) -> None:
+    ) -> t.List:
         """
         Appends a formatted section to a report based on the provided items. This section includes
         a header and a table constructed from the items list with specified headers and attributes.
 
-        :param report_sections: List where the HTML report sections are collected. This list is
-            modified in-place by appending new sections.
         :param title: Title for the report section. This title is used as a header above the table.
         :param items: List of item objects to include in the table. Each item should have attributes
             that correspond to the row_attrs and value_functions specified.
@@ -86,17 +109,34 @@ class ReportGenerator:
             a function that takes an item and returns a string. This is used for
             generating dynamic columns based on item data.
 
-        :return: None. The function modifies the 'report_sections' list by appending new HTML sections.
+        :return: List with appended HTML sections.
         """
         if not items:
-            return
+            return []
 
-        report_sections.append(f'<h2>{title}</h2>')
-        report_sections.append(
+        report_sections = [
+            f"""<h2 id="{format_permalink(title)}">{title}<i class="fas fa-link copy-link-icon"
+            onclick="copyPermalink('#{format_permalink(title)}')"></i></h2>""",
             self._create_table_for_items(
                 items=items, headers=headers, row_attrs=row_attrs, value_functions=value_functions or []
-            )
-        )
+            ),
+        ]
+        return report_sections
+
+    @staticmethod
+    def generate_additional_info_section(title: str, count: int, report_url: t.Optional[str] = None) -> str:
+        """
+        Generate a section for the additional info string.
+
+        :param title: The title of the section.
+        :param count: The count of test cases.
+        :param report_url: The URL of the report. If count = 0, only the count will be included.
+        :return: The formatted additional info section string.
+        """
+        if count != 0 and report_url:
+            return f'- **{title}:** [{count}]({report_url}/#{format_permalink(title)})\n'
+        else:
+            return f'- **{title}:** {count}\n'
 
     def _create_table_for_items(
         self,
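generate_additional_info_section() is what builds the bullet lines of the MR comment: when a report URL is available and the count is non-zero, it links the count to the matching permalinked table title; otherwise it prints the bare count. A hedged usage sketch (the URL is invented, and the exact anchor text depends on format_permalink(), which is not shown in this diff):

    # assumes ReportGenerator is importable from the report module touched by this commit
    with_link = ReportGenerator.generate_additional_info_section(
        'Known Failure Cases', 2,
        'https://pages.example.com/-/esp-idf/-/jobs/123456/artifacts/failed_cases.html',
    )
    # '- **Known Failure Cases:** [2](https://.../failed_cases.html/#<permalink-of-title>)\n'
    without_link = ReportGenerator.generate_additional_info_section('Skipped Test Cases', 0)
    # '- **Skipped Test Cases:** 0\n'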
@@ -185,7 +225,7 @@ class ReportGenerator:
     def _get_report_str(self) -> str:
         raise NotImplementedError
 
-    def post_report(self, job_id: int, commit_id: str) -> None:
+    def post_report(self, print_report_path: bool = True) -> None:
         # report in html format, otherwise will exceed the limit
         comment = f'#### {self.title}\n'
 
@@ -194,18 +234,12 @@ class ReportGenerator:
         if self.additional_info:
             comment += f'{self.additional_info}\n'
 
-        if report_str:
-            with open(self.output_filepath, 'w') as fw:
-                fw.write(report_str)
-
-            # for example, {URL}/-/esp-idf/-/jobs/{id}/artifacts/list_job_84.txt
-            # CI_PAGES_URL is {URL}/esp-idf, which missed one `-`
-            url = os.getenv('CI_PAGES_URL', '').replace('esp-idf', '-/esp-idf')
-
-            comment += f"""
-Full {self.title} here: {url}/-/jobs/{job_id}/artifacts/{self.output_filepath} (with commit {commit_id[:8]})
-
-"""
+        report_url_path = self.write_report_to_file(report_str, self.job_id, self.output_filepath)
+        if print_report_path and report_url_path:
+            comment += dedent(f"""
+                Full {self.title} here: {report_url_path} (with commit {self.commit_id[:8]}
+
+                """)
         print(comment)
 
         if self.mr is None:
@@ -234,11 +268,13 @@ class BuildReportGenerator(ReportGenerator):
         project_id: int,
         mr_iid: int,
         pipeline_id: int,
+        job_id: int,
+        commit_id: str,
         *,
         title: str = 'Build Report',
         apps: t.List[App],
     ):
-        super().__init__(project_id, mr_iid, pipeline_id, title=title)
+        super().__init__(project_id, mr_iid, pipeline_id, job_id, commit_id, title=title)
         self.apps = apps
 
         self.apps_presigned_url_filepath = TEST_RELATED_APPS_DOWNLOAD_URLS_FILENAME
@@ -365,14 +401,26 @@ class TargetTestReportGenerator(ReportGenerator):
         project_id: int,
         mr_iid: int,
         pipeline_id: int,
+        job_id: int,
+        commit_id: str,
         *,
         title: str = 'Target Test Report',
         test_cases: t.List[TestCase],
     ):
-        super().__init__(project_id, mr_iid, pipeline_id, title=title)
+        super().__init__(project_id, mr_iid, pipeline_id, job_id, commit_id, title=title)
 
         self.test_cases = test_cases
         self._known_failure_cases_set = None
+        self.report_titles_map = {
+            'failed_yours': 'Failed Test Cases on Your branch (Excludes Known Failure Cases)',
+            'failed_others': 'Failed Test Cases on Other branches (Excludes Known Failure Cases)',
+            'failed_known': 'Known Failure Cases',
+            'skipped': 'Skipped Test Cases',
+            'succeeded': 'Succeeded Test Cases',
+        }
+        self.skipped_test_cases_report_file = 'skipped_cases.html'
+        self.succeeded_cases_report_file = 'succeeded_cases.html'
+        self.failed_cases_report_file = 'failed_cases.html'
 
     @property
     def known_failure_cases_set(self) -> t.Optional[t.Set[str]]:
@@ -382,6 +430,10 @@ class TargetTestReportGenerator(ReportGenerator):
         return self._known_failure_cases_set
 
     def get_known_failure_cases(self) -> t.List[TestCase]:
+        """
+        Retrieve the known failure test cases.
+        :return: A list of known failure test cases.
+        """
         if self.known_failure_cases_set is None:
             return []
         matched_cases = [
@@ -392,109 +444,187 @@ class TargetTestReportGenerator(ReportGenerator):
         ]
         return matched_cases
 
+    @staticmethod
+    def filter_test_cases(
+        cur_branch_failures: t.List[TestCase],
+        other_branch_failures: t.List[TestCase],
+    ) -> t.Tuple[t.List[TestCase], t.List[TestCase]]:
+        """
+        Filter the test cases into current branch failures and other branch failures.
+
+        :param cur_branch_failures: List of failed test cases on the current branch.
+        :param other_branch_failures: List of failed test cases on other branches.
+        :return: A tuple containing two lists:
+            - failed_test_cases_cur_branch_only: Test cases that have failed only on the current branch.
+            - failed_test_cases_other_branch_exclude_cur_branch: Test cases that have failed on other branches
+              excluding the current branch.
+        """
+        cur_branch_unique_failures = []
+        other_branch_failure_map = {tc.name: tc for tc in other_branch_failures}
+
+        for cur_tc in cur_branch_failures:
+            if cur_tc.latest_failed_count > 0 and (
+                cur_tc.name not in other_branch_failure_map
+                or other_branch_failure_map[cur_tc.name].latest_failed_count == 0
+            ):
+                cur_branch_unique_failures.append(cur_tc)
+        uniq_fail_names = {cur_tc.name for cur_tc in cur_branch_unique_failures}
+        other_branch_exclusive_failures = [tc for tc in other_branch_failures if tc.name not in uniq_fail_names]
+
+        return cur_branch_unique_failures, other_branch_exclusive_failures
+
+    def get_failed_cases_report_parts(self) -> t.List[str]:
+        """
+        Generate the report parts for failed test cases and update the additional info section.
+        :return: A list of strings representing the table sections for the failed test cases.
+        """
+        known_failures = self.get_known_failure_cases()
+        failed_test_cases = self._filter_items(
+            self.test_cases, lambda tc: tc.is_failure and tc.name not in {case.name for case in known_failures}
+        )
+        failed_test_cases_cur_branch = self._sort_items(
+            fetch_failed_testcases_failure_ratio(
+                copy.deepcopy(failed_test_cases),
+                branches_filter={'include_branches': [os.getenv('CI_MERGE_REQUEST_SOURCE_BRANCH_NAME', '')]},
+            ),
+            key='latest_failed_count',
+        )
+        failed_test_cases_other_branch = self._sort_items(
+            fetch_failed_testcases_failure_ratio(
+                copy.deepcopy(failed_test_cases),
+                branches_filter={'exclude_branches': [os.getenv('CI_MERGE_REQUEST_SOURCE_BRANCH_NAME', '')]},
+            ),
+            key='latest_failed_count',
+        )
+        failed_test_cases_cur_branch, failed_test_cases_other_branch = self.filter_test_cases(
+            failed_test_cases_cur_branch, failed_test_cases_other_branch
+        )
+        cur_branch_cases_table_section = self.create_table_section(
+            title=self.report_titles_map['failed_yours'],
+            items=failed_test_cases_cur_branch,
+            headers=[
+                'Test Case',
+                'Test Script File Path',
+                'Failure Reason',
+                f'Failures on your branch (40 latest testcases)',
+                'Dut Log URL',
+                'Job URL',
+                'Grafana URL',
+            ],
+            row_attrs=['name', 'file', 'failure', 'dut_log_url', 'ci_job_url', 'ci_dashboard_url'],
+            value_functions=[
+                (
+                    'Failures on your branch (40 latest testcases)',
+                    lambda item: f"{getattr(item, 'latest_failed_count', '')} / {getattr(item, 'latest_total_count', '')}",
+                )
+            ],
+        )
+        other_branch_cases_table_section = self.create_table_section(
+            title=self.report_titles_map['failed_others'],
+            items=failed_test_cases_other_branch,
+            headers=[
+                'Test Case',
+                'Test Script File Path',
+                'Failure Reason',
+                'Failures across all other branches (40 latest testcases)',
+                'Dut Log URL',
+                'Job URL',
+                'Grafana URL',
+            ],
+            row_attrs=['name', 'file', 'failure', 'dut_log_url', 'ci_job_url', 'ci_dashboard_url'],
+            value_functions=[
+                (
+                    'Failures across all other branches (40 latest testcases)',
+                    lambda item: f"{getattr(item, 'latest_failed_count', '')} / {getattr(item, 'latest_total_count', '')}",
+                )
+            ],
+        )
+        known_failures_cases_table_section = self.create_table_section(
+            title=self.report_titles_map['failed_known'],
+            items=known_failures,
+            headers=['Test Case', 'Test Script File Path', 'Failure Reason', 'Job URL', 'Grafana URL'],
+            row_attrs=['name', 'file', 'failure', 'ci_job_url', 'ci_dashboard_url'],
+        )
+        failed_cases_report_url = self.write_report_to_file(
+            self.generate_html_report(
+                ''.join(
+                    cur_branch_cases_table_section
+                    + other_branch_cases_table_section
+                    + known_failures_cases_table_section
+                )
+            ),
+            self.job_id,
+            self.failed_cases_report_file,
+        )
+        self.additional_info += self.generate_additional_info_section(
+            self.report_titles_map['failed_yours'], len(failed_test_cases_cur_branch), failed_cases_report_url
+        )
+        self.additional_info += self.generate_additional_info_section(
+            self.report_titles_map['failed_others'], len(failed_test_cases_other_branch), failed_cases_report_url
+        )
+        self.additional_info += self.generate_additional_info_section(
+            self.report_titles_map['failed_known'], len(known_failures), failed_cases_report_url
+        )
+        return cur_branch_cases_table_section + other_branch_cases_table_section + known_failures_cases_table_section
+
+    def get_skipped_cases_report_parts(self) -> t.List[str]:
+        """
+        Generate the report parts for skipped test cases and update the additional info section.
+        :return: A list of strings representing the table sections for the skipped test cases.
+        """
+        skipped_test_cases = self._filter_items(self.test_cases, lambda tc: tc.is_skipped)
+        skipped_cases_table_section = self.create_table_section(
+            title=self.report_titles_map['skipped'],
+            items=skipped_test_cases,
+            headers=['Test Case', 'Test Script File Path', 'Skipped Reason', 'Grafana URL'],
+            row_attrs=['name', 'file', 'skipped', 'ci_dashboard_url'],
+        )
+        skipped_cases_report_url = self.write_report_to_file(
+            self.generate_html_report(''.join(skipped_cases_table_section)),
+            self.job_id,
+            self.skipped_test_cases_report_file,
+        )
+        self.additional_info += self.generate_additional_info_section(
+            self.report_titles_map['skipped'], len(skipped_test_cases), skipped_cases_report_url
+        )
+        return skipped_cases_table_section
+
+    def get_succeeded_cases_report_parts(self) -> t.List[str]:
+        """
+        Generate the report parts for succeeded test cases and update the additional info section.
+        :return: A list of strings representing the table sections for the succeeded test cases.
+        """
+        succeeded_test_cases = self._filter_items(self.test_cases, lambda tc: tc.is_success)
+        succeeded_cases_table_section = self.create_table_section(
+            title=self.report_titles_map['succeeded'],
+            items=succeeded_test_cases,
+            headers=['Test Case', 'Test Script File Path', 'Job URL', 'Grafana URL'],
+            row_attrs=['name', 'file', 'ci_job_url', 'ci_dashboard_url'],
+        )
+        succeeded_cases_report_url = self.write_report_to_file(
+            self.generate_html_report(''.join(succeeded_cases_table_section)),
+            self.job_id,
+            self.succeeded_cases_report_file,
+        )
+        self.additional_info += self.generate_additional_info_section(
+            self.report_titles_map['succeeded'], len(succeeded_test_cases), succeeded_cases_report_url
+        )
+        self.additional_info += '\n'
+        return succeeded_cases_table_section
+
     def _get_report_str(self) -> str:
         """
         Generate a complete HTML report string by processing test cases.
         :return: Complete HTML report string.
         """
-        report_parts: list = []
-
-        known_failures = self.get_known_failure_cases()
-        known_failure_case_names = {case.name for case in known_failures}
-        failed_test_cases = self._filter_items(
-            self.test_cases, lambda tc: tc.is_failure and tc.name not in known_failure_case_names
-        )
-        failed_test_cases_with_ratio = self._sort_items(
-            fetch_failed_testcases_failure_ratio(failed_test_cases), key='latest_failed_count'
-        )
-        skipped_test_cases = self._filter_items(self.test_cases, lambda tc: tc.is_skipped)
-        successful_test_cases = self._filter_items(self.test_cases, lambda tc: tc.is_success)
-
-        current_branch_failures = self._sort_items(
-            self._filter_items(failed_test_cases_with_ratio, lambda tc: tc.latest_failed_count == 0),
-            key='latest_failed_count',
-        )
-        other_branch_failures = self._sort_items(
-            self._filter_items(
-                failed_test_cases_with_ratio, lambda tc: tc.name not in [t.name for t in current_branch_failures]
-            ),
-            key='latest_failed_count',
-        )
-
-        self.create_table_section(
-            report_sections=report_parts,
-            title='Failed Test Cases on Your branch (Excludes Known Failure Cases)',
-            items=current_branch_failures,
-            headers=[
-                'Test Case',
-                'Test Script File Path',
-                'Failure Reason',
-                'Failures across all other branches (20 latest testcases)',
-                'Job URL',
-                'Grafana URL',
-            ],
-            row_attrs=['name', 'file', 'failure', 'ci_job_url', 'ci_dashboard_url'],
-            value_functions=[
-                (
-                    'Failures across all other branches (20 latest testcases)',
-                    lambda item: f"{getattr(item, 'latest_failed_count', '')} / {getattr(item, 'latest_total_count', '')}",
-                )
-            ],
-        )
-        self.create_table_section(
-            report_sections=report_parts,
-            title='Failed Test Cases on Other branches (Excludes Known Failure Cases)',
-            items=other_branch_failures,
-            headers=[
-                'Test Case',
-                'Test Script File Path',
-                'Failure Reason',
-                'Failures across all other branches (20 latest testcases)',
-                'Job URL',
-                'Grafana URL',
-            ],
-            row_attrs=['name', 'file', 'failure', 'ci_job_url', 'ci_dashboard_url'],
-            value_functions=[
-                (
-                    'Failures across all other branches (20 latest testcases)',
-                    lambda item: f"{getattr(item, 'latest_failed_count', '')} / {getattr(item, 'latest_total_count', '')}",
-                )
-            ],
-        )
-
-        self.create_table_section(
-            report_sections=report_parts,
-            title='Known Failure Cases',
-            items=known_failures,
-            headers=['Test Case', 'Test Script File Path', 'Failure Reason', 'Job URL', 'Grafana URL'],
-            row_attrs=['name', 'file', 'failure', 'ci_job_url', 'ci_dashboard_url'],
-        )
-        self.create_table_section(
-            report_sections=report_parts,
-            title='Skipped Test Cases',
-            items=skipped_test_cases,
-            headers=['Test Case', 'Test Script File Path', 'Skipped Reason', 'Grafana URL'],
-            row_attrs=['name', 'file', 'skipped', 'ci_dashboard_url'],
-        )
-        self.create_table_section(
-            report_sections=report_parts,
-            title='Succeeded Test Cases',
-            items=successful_test_cases,
-            headers=['Test Case', 'Test Script File Path', 'Job URL', 'Grafana URL'],
-            row_attrs=['name', 'file', 'ci_job_url', 'ci_dashboard_url'],
-        )
-
-        self.additional_info = (
-            '**Test Case Summary:**\n'
-            f'- **Failed Test Cases on Your Branch (Excludes Known Failure Cases):** {len(current_branch_failures)}.\n'
-            f'- **Failed Test Cases on Other Branches (Excludes Known Failure Cases):** {len(other_branch_failures)}.\n'
-            f'- **Known Failures:** {len(known_failures)}\n'
-            f'- **Skipped Test Cases:** {len(skipped_test_cases)}\n'
-            f'- **Succeeded Test Cases:** {len(successful_test_cases)}\n\n'
-            'Please check report below for more information.\n\n'
-        )
-
-        return self.generate_html_report(''.join(report_parts))
+        self.additional_info = f'**Test Case Summary (with commit {self.commit_id[:8]}):**\n'
+        failed_cases_report_parts = self.get_failed_cases_report_parts()
+        skipped_cases_report_parts = self.get_skipped_cases_report_parts()
+        succeeded_cases_report_parts = self.get_succeeded_cases_report_parts()
+
+        return self.generate_html_report(
+            ''.join(failed_cases_report_parts + skipped_cases_report_parts + succeeded_cases_report_parts)
+        )
 
 
 class JobReportGenerator(ReportGenerator):
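The new filter_test_cases() split is easiest to see on a toy input: a case counts as "yours" only if it has recent failures on the current branch and is not also failing on other branches, and everything it claims is then removed from the "other branches" list. A small self-contained check using stand-in objects (only .name and .latest_failed_count matter here; this assumes TargetTestReportGenerator is importable from the report module touched by this commit):

    from types import SimpleNamespace as TC

    cur = [TC(name='test_a', latest_failed_count=2), TC(name='test_b', latest_failed_count=0)]
    other = [TC(name='test_a', latest_failed_count=0), TC(name='test_c', latest_failed_count=5)]

    yours, others = TargetTestReportGenerator.filter_test_cases(cur, other)
    print([tc.name for tc in yours])   # ['test_a'] -- failing on your branch, not on other branches
    print([tc.name for tc in others])  # ['test_c'] -- test_a is excluded because it was claimed above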
@@ -503,12 +633,19 @@ class JobReportGenerator(ReportGenerator):
         project_id: int,
         mr_iid: int,
         pipeline_id: int,
+        job_id: int,
+        commit_id: str,
         *,
         title: str = 'Job Report',
         jobs: t.List[GitlabJob],
     ):
-        super().__init__(project_id, mr_iid, pipeline_id, title=title)
+        super().__init__(project_id, mr_iid, pipeline_id, job_id, commit_id, title=title)
         self.jobs = jobs
+        self.report_titles_map = {
+            'failed_jobs': 'Failed Jobs (Excludes "integration_test" and "target_test" jobs)',
+            'succeeded': 'Succeeded Jobs',
+        }
+        self.failed_jobs_report_file = 'job_report.html'
 
     def _get_report_str(self) -> str:
         """
@@ -516,7 +653,6 @@ class JobReportGenerator(ReportGenerator):
         :return: Complete HTML report string.
         """
         report_str: str = ''
-        report_parts: list = []
 
         if not self.jobs:
             print('No jobs found, skip generating job report')
@@ -530,34 +666,41 @@ class JobReportGenerator(ReportGenerator):
         )
         succeeded_jobs = self._filter_items(self.jobs, lambda job: job.is_success)
 
-        self.additional_info = (
-            '**Job Summary:**\n'
-            f'- **Failed Jobs (Excludes "integration_test" and "target_test" jobs):** {len(relevant_failed_jobs)}\n'
-            f'- **Succeeded Jobs:** {len(succeeded_jobs)}\n\n'
-        )
-
-        if relevant_failed_jobs:
-            self.create_table_section(
-                report_sections=report_parts,
-                title='Failed Jobs (Excludes "integration_test" and "target_test" jobs)',
-                items=relevant_failed_jobs,
-                headers=[
-                    'Job Name',
-                    'Failure Reason',
-                    'Failure Log',
-                    'Failures across all other branches (10 latest jobs)',
-                    'URL',
-                    'CI Dashboard URL',
-                ],
-                row_attrs=['name', 'failure_reason', 'failure_log', 'url', 'ci_dashboard_url'],
-                value_functions=[
-                    (
-                        'Failures across all other branches (10 latest jobs)',
-                        lambda item: f"{getattr(item, 'latest_failed_count', '')} / {getattr(item, 'latest_total_count', '')}",
-                    )
-                ],
-            )
-            self.additional_info += f'Please check report below for more information.\n\n'
-            report_str = self.generate_html_report(''.join(report_parts))
+        self.additional_info = f'**Job Summary (with commit {self.commit_id[:8]}):**\n'
+        self.additional_info += self.generate_additional_info_section(
+            self.report_titles_map['succeeded'], len(succeeded_jobs)
+        )
+
+        if not relevant_failed_jobs:
+            self.additional_info += self.generate_additional_info_section(
+                self.report_titles_map['failed_jobs'], len(relevant_failed_jobs)
+            )
+            return report_str
+
+        report_sections = self.create_table_section(
+            title='Failed Jobs (Excludes "integration_test" and "target_test" jobs)',
+            items=relevant_failed_jobs,
+            headers=[
+                'Job Name',
+                'Failure Reason',
+                'Failure Log',
+                'Failures across all other branches (10 latest jobs)',
+                'URL',
+                'CI Dashboard URL',
+            ],
+            row_attrs=['name', 'failure_reason', 'failure_log', 'url', 'ci_dashboard_url'],
+            value_functions=[
+                (
+                    'Failures across all other branches (10 latest jobs)',
+                    lambda item: f"{getattr(item, 'latest_failed_count', '')} / {getattr(item, 'latest_total_count', '')}",
+                )
+            ],
+        )
+        relevant_failed_jobs_report_url = get_report_url(self.job_id, self.failed_jobs_report_file)
+        self.additional_info += self.generate_additional_info_section(
+            self.report_titles_map['failed_jobs'], len(relevant_failed_jobs), relevant_failed_jobs_report_url
+        )
+
+        report_str = self.generate_html_report(''.join(report_sections))
 
         return report_str
@@ -74,17 +74,17 @@ def generate_build_report(args: argparse.Namespace) -> None:
         app for file_name in glob.glob(args.app_list_filepattern) for app in import_apps_from_txt(file_name)
     ]
     report_generator = BuildReportGenerator(
-        args.project_id, args.mr_iid, args.pipeline_id, apps=apps
+        args.project_id, args.mr_iid, args.pipeline_id, args.job_id, args.commit_id, apps=apps
     )
-    report_generator.post_report(args.job_id, args.commit_id)
+    report_generator.post_report()
 
 
 def generate_target_test_report(args: argparse.Namespace) -> None:
     test_cases: t.List[t.Any] = parse_testcases_from_filepattern(args.junit_report_filepattern)
     report_generator = TargetTestReportGenerator(
-        args.project_id, args.mr_iid, args.pipeline_id, test_cases=test_cases
+        args.project_id, args.mr_iid, args.pipeline_id, args.job_id, args.commit_id, test_cases=test_cases
    )
-    report_generator.post_report(args.job_id, args.commit_id)
+    report_generator.post_report(print_report_path=False)
 
 
 def generate_jobs_report(args: argparse.Namespace) -> None:
@@ -93,8 +93,8 @@ def generate_jobs_report(args: argparse.Namespace) -> None:
     if not jobs:
         return
 
-    report_generator = JobReportGenerator(args.project_id, args.mr_iid, args.pipeline_id, jobs=jobs)
-    report_generator.post_report(args.job_id, args.commit_id)
+    report_generator = JobReportGenerator(args.project_id, args.mr_iid, args.pipeline_id, args.job_id, args.commit_id, jobs=jobs)
+    report_generator.post_report(print_report_path=False)
 
 
 if __name__ == '__main__':
@@ -6,6 +6,9 @@ generate_pytest_report:
   artifacts:
     paths:
       - target_test_report.html
+      - failed_cases.html
+      - skipped_cases.html
+      - succeeded_cases.html
   script:
     - python tools/ci/get_known_failure_cases_file.py
     - python tools/ci/dynamic_pipelines/scripts/generate_report.py --report-type target_test
@@ -5,18 +5,29 @@
     <title>{{title}}</title>
     <link href="https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/css/bootstrap.min.css" rel="stylesheet" />
     <link href="https://unpkg.com/bootstrap-table@1.22.1/dist/bootstrap-table.min.css" rel="stylesheet" />
+    <link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/bootstrap-table@1.23.0/dist/extensions/sticky-header/bootstrap-table-sticky-header.css">
+    <link href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.0.0-beta3/css/all.min.css" rel="stylesheet">
     <style>
         .text-toggle, .full-text { cursor: pointer; }
         th:nth-child(1), td:nth-child(1) { width: 5%; }
         th:nth-child(2), td:nth-child(2),
         th:nth-child(3), td:nth-child(3) { width: 30%; }
         th, td {
             overflow: hidden;
             text-overflow: ellipsis;
         }
         h2 {
             margin-top: 10px;
         }
+        .copy-link-icon {
+            font-size: 20px;
+            margin-left: 10px;
+            color: #8f8f97;
+            cursor: pointer;
+        }
+        .copy-link-icon:hover {
+            color: #282b2c;
+        }
     </style>
 </head>
 <body>
@@ -24,8 +35,29 @@
     <script src="https://cdn.jsdelivr.net/npm/jquery/dist/jquery.min.js"></script>
     <script src="https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js"></script>
     <script src="https://unpkg.com/bootstrap-table@1.22.1/dist/bootstrap-table.min.js"></script>
+    <script src="https://cdn.jsdelivr.net/npm/bootstrap-table@1.23.0/dist/extensions/sticky-header/bootstrap-table-sticky-header.min.js"></script>
     <script>
-        $(document).ready(function() {
+        $(window).on('load', function() {
+            var hash = window.location.hash;
+            if (hash) {
+                setTimeout(function() {
+                    $('html, body').animate({ scrollTop: $(hash).offset().top }, 100);
+                }, 100);
+            }
+        });
+    </script>
+    <script>
+        function copyPermalink(anchorId) {
+            const fullUrl = window.location.origin + window.location.pathname + anchorId;
+            history.pushState(null, null, anchorId);
+            navigator.clipboard.writeText(fullUrl)
+            setTimeout(function() {
+                $('html, body').animate({ scrollTop: $(anchorId).offset().top }, 100);
+            }, 100);
+        }
+    </script>
+    <script>
+        $(document).ready(function() {
             $('table.table td').each(function() {
                 var cell = $(this);
                 if (cell.text().length > 100) {
@@ -35,7 +67,6 @@
                     cell.append('<a href="#" class="toggle-link">Show More</a>');
                 }
             });
-
             $('body').on('click', '.toggle-link', function(e) {
                 e.preventDefault();
                 var link = $(this);
@@ -51,7 +82,7 @@
                     toggleSpan.hide();
                 }
             });
         });
     </script>
 </body>
 </html>
@@ -1,7 +1,7 @@
 <?xml version="1.0" encoding="utf-8"?>
 <testsuites>
 <testsuite errors="2" failures="0" hostname="FA002598" name="pytest" skipped="0" tests="2" time="22.981" timestamp="2024-05-17T17:51:26.669364">
-<testcase classname="components.driver.test_apps.i2c_test_apps.pytest_i2c" file="components/driver/test_apps/i2c_test_apps/pytest_i2c.py" line="21" name="('esp32h2', 'esp32h2').('defaults', 'defaults').test_i2c_multi_device" time="11.910">
+<testcase classname="components.driver.test_apps.i2c_test_apps.pytest_i2c" file="components/driver/test_apps/i2c_test_apps/pytest_i2c.py" line="21" name="('esp32h2', 'esp32h2').('defaults', 'defaults').test_i2c_multi_device" dut_log_url="https://url/esp/esp-idf/pytest-embedded/2024-07-01_10-53-05-207900/('esp32h2', 'esp32h2').('defaults', 'defaults').test_i2c_multi_device/dut.txt" time="11.910">
 <error message="failed on setup with "EOFError"">conftest.py:74: in case_tester
     yield CaseTester(dut, **kwargs)
 tools/ci/idf_unity_tester.py:202: in __init__
@@ -18,7 +18,7 @@ tools/ci/idf_unity_tester.py:202: in __init__
     raise EOFError
 E   EOFError</error>
 </testcase>
-<testcase classname="components.driver.test_apps.i2s_test_apps.i2s_multi_dev.pytest_i2s_multi_dev" file="components/driver/test_apps/i2s_test_apps/i2s_multi_dev/pytest_i2s_multi_dev.py" line="5" name="('esp32h2', 'esp32h2').('default', 'default').test_i2s_multi_dev" time="11.071">
+<testcase classname="components.driver.test_apps.i2s_test_apps.i2s_multi_dev.pytest_i2s_multi_dev" file="components/driver/test_apps/i2s_test_apps/i2s_multi_dev/pytest_i2s_multi_dev.py" line="5" name="('esp32h2', 'esp32h2').('default', 'default').test_i2s_multi_dev" dut_log_url="https://url/esp/esp-idf/pytest-embedded/2024-07-01_10-53-05-207900/('esp32h2', 'esp32h2').('default', 'default').test_i2s_multi_dev/dut.txt" time="11.071">
 <error message="failed on setup with "EOFError"">conftest.py:74: in case_tester
     yield CaseTester(dut, **kwargs)
 tools/ci/idf_unity_tester.py:202: in __init__
@@ -37,9 +37,9 @@ E   EOFError</error>
 </testcase>
 </testsuite>
 <testsuite errors="0" failures="1" hostname="GX64-C2-SH-1-ITS1N4" name="pytest" skipped="0" tests="3" time="101.163" timestamp="2024-05-17T17:52:04.061589">
-<testcase classname="components.vfs.test_apps.pytest_vfs" file="components/vfs/test_apps/pytest_vfs.py" line="7" name="esp32c2.default.test_vfs_default" time="30.044"/>
+<testcase classname="components.vfs.test_apps.pytest_vfs" file="components/vfs/test_apps/pytest_vfs.py" line="7" name="esp32c2.default.test_vfs_default" dut_log_url="https://https://url/esp/esp-idf/pytest-embedded/2024-07-01_10-53-05-207900/esp32c2.default.test_vfs_default/dut.txt" time="30.044"/>
-<testcase classname="components.vfs.test_apps.pytest_vfs" file="components/vfs/test_apps/pytest_vfs.py" line="7" name="esp32c2.iram.test_vfs_default" time="28.323"/>
+<testcase classname="components.vfs.test_apps.pytest_vfs" file="components/vfs/test_apps/pytest_vfs.py" line="7" name="esp32c2.iram.test_vfs_default" dut_log_url="https://https://url/esp/esp-idf/pytest-embedded/2024-07-01_10-53-05-207900/esp32c2.iram.test_vfs_default/dut.txt" time="28.323"/>
-<testcase classname="components.wpa_supplicant.test_apps.pytest_wpa_supplicant_ut" file="components/wpa_supplicant/test_apps/pytest_wpa_supplicant_ut.py" line="8" name="esp32c2.default.test_wpa_supplicant_ut" time="42.796">
+<testcase classname="components.wpa_supplicant.test_apps.pytest_wpa_supplicant_ut" file="components/wpa_supplicant/test_apps/pytest_wpa_supplicant_ut.py" line="8" name="esp32c2.default.test_wpa_supplicant_ut" dut_log_url="https://url/esp/esp-idf/pytest-embedded/2024-07-01_10-53-05-207900/esp32c2.default.test_wpa_supplicant_ut/dut.txt" time="42.796">
 <failure message="AssertionError: Unity test failed">/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pytest_embedded/plugin.py:1272: in pytest_runtest_call
     self._raise_dut_failed_cases_if_exists(duts)  # type: ignore
 /root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pytest_embedded/plugin.py:1207: in _raise_dut_failed_cases_if_exists
@@ -48,19 +48,19 @@ E   AssertionError: Unity test failed</failure>
 </testcase>
 </testsuite>
 <testsuite errors="0" failures="0" hostname="runner-zmdq2hnf-project-103-concurrent-3" name="pytest" skipped="1" tests="8" time="123.596" timestamp="2024-05-17T03:04:11.412971">
-<testcase classname="test_common" file="test_common.py" line="114" name="test_python_interpreter_unix" time="7.523"/>
+<testcase classname="test_common" file="test_common.py" line="114" name="test_python_interpreter_unix" dut_log_url="https://url/esp/esp-idf/pytest-embedded/2024-07-01_10-53-05-207900/test_python_interpreter_unix/dut.txt" time="7.523"/>
-<testcase classname="test_common" file="test_common.py" line="133" name="test_python_interpreter_win" time="0.000">
+<testcase classname="test_common" file="test_common.py" line="133" name="test_python_interpreter_win" dut_log_url="https://url/esp/esp-idf/pytest-embedded/2024-07-01_10-53-05-207900/test_python_interpreter_win/dut.txt" time="0.000">
 <skipped message="Linux does not support executing .exe files" type="pytest.skip">/builds/espressif/esp-idf/tools/test_build_system/test_common.py:134: Linux does not support executing .exe files</skipped>
 </testcase>
-<testcase classname="test_common" file="test_common.py" line="147" name="test_invoke_confserver" time="10.179"/>
+<testcase classname="test_common" file="test_common.py" line="147" name="test_invoke_confserver" dut_log_url="https://url/esp/esp-idf/pytest-embedded/2024-07-01_10-53-05-207900/test_invoke_confserver/dut.txt" time="10.179"/>
-<testcase classname="test_common" file="test_common.py" line="153" name="test_ccache_used_to_build" time="23.713"/>
+<testcase classname="test_common" file="test_common.py" line="153" name="test_ccache_used_to_build" dut_log_url="https://url/esp/esp-idf/pytest-embedded/2024-07-01_10-53-05-207900/test_ccache_used_to_build/dut.txt" time="23.713"/>
-<testcase classname="test_common" file="test_common.py" line="171" name="test_toolchain_prefix_in_description_file" time="8.390"/>
+<testcase classname="test_common" file="test_common.py" line="171" name="test_toolchain_prefix_in_description_file" dut_log_url="https://url/esp/esp-idf/pytest-embedded/2024-07-01_10-53-05-207900/test_toolchain_prefix_in_description_file/dut.txt" time="8.390"/>
-<testcase classname="test_common" file="test_common.py" line="178" name="test_subcommands_with_options" time="28.118"/>
+<testcase classname="test_common" file="test_common.py" line="178" name="test_subcommands_with_options" dut_log_url="https://url/esp/esp-idf/pytest-embedded/2024-07-01_10-53-05-207900/test_subcommands_with_options/dut.txt" time="28.118"/>
-<testcase classname="test_common" file="test_common.py" line="194" name="test_fallback_to_build_system_target" time="11.687"/>
+<testcase classname="test_common" file="test_common.py" line="194" name="test_fallback_to_build_system_target" dut_log_url="https://url/esp/esp-idf/pytest-embedded/2024-07-01_10-53-05-207900/test_fallback_to_build_system_target/dut.txt" time="11.687"/>
-<testcase classname="test_common" file="test_common.py" line="203" name="test_create_component_project" time="33.986"/>
+<testcase classname="test_common" file="test_common.py" line="203" name="test_create_component_project" dut_log_url="https://url/esp/esp-idf/pytest-embedded/2024-07-01_10-53-05-207900/test_create_component_project/dut.txt" time="33.986"/>
 </testsuite>
 <testsuite errors="0" failures="4" hostname="FA002285" name="pytest" skipped="0" tests="4" time="231.048" timestamp="2024-05-17T17:50:02.291973">
-<testcase classname="components.esp_timer.test_apps.pytest_esp_timer_ut" file="components/esp_timer/test_apps/pytest_esp_timer_ut.py" line="20" name="esp32c3.release.test_esp_timer" time="39.686">
+<testcase classname="components.esp_timer.test_apps.pytest_esp_timer_ut" file="components/esp_timer/test_apps/pytest_esp_timer_ut.py" line="20" name="esp32c3.release.test_esp_timer" dut_log_url="https://url/esp/esp-idf/pytest-embedded/2024-07-01_10-53-05-207900/esp32c3.release.test_esp_timer/dut.txt" time="39.686">
 <failure message="pexpect.exceptions.TIMEOUT: Not found "Press ENTER to see the list of tests"
 Bytes in current buffer (color code eliminated): ce710,len:0x2afc entry 0x403cc710
 Please check the full log here: /builds/espressif/esp-idf/pytest_embedded/2024-05-17_17-50-04/esp32c3.release.test_esp_timer/dut.txt">/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pytest_embedded/dut.py:76: in wrapper
@@ -95,7 +95,7 @@ E   pexpect.exceptions.TIMEOUT: Not found "Press ENTER to see the list of tests"
 E   Bytes in current buffer (color code eliminated): ce710,len:0x2afc entry 0x403cc710
 E   Please check the full log here: /builds/espressif/esp-idf/pytest_embedded/2024-05-17_17-50-04/esp32c3.release.test_esp_timer/dut.txt</failure>
 </testcase>
-<testcase classname="components.wear_levelling.test_apps.pytest_wear_levelling" file="components/wear_levelling/test_apps/pytest_wear_levelling.py" line="7" name="esp32c3.512safe.test_wear_levelling" time="69.850">
+<testcase classname="components.wear_levelling.test_apps.pytest_wear_levelling" file="components/wear_levelling/test_apps/pytest_wear_levelling.py" line="7" name="esp32c3.512safe.test_wear_levelling" dut_log_url="https://url/esp/esp-idf/pytest-embedded/2024-07-01_10-53-05-207900/esp32c3.512safe.test_wear_levelling/dut.txt" time="69.850">
 <failure message="pexpect.exceptions.TIMEOUT: Not found "re.compile(b'^[-]+\\s*(\\d+) Tests (\\d+) Failures (\\d+) Ignored\\s*(?P<result>OK|FAIL)', re.MULTILINE)"
 Bytes in current buffer (color code eliminated): Serial port /dev/ttyUSB16 Connecting.... Connecting.... esptool.py v4.7.0 Found 1 serial ports Chip is ESP32-C3 (QFN32) (revision v0.3) Features: WiFi, BLE, Embedded Flash 4MB... (total 6673 bytes)
 Please check the full log here: /builds/espressif/esp-idf/pytest_embedded/2024-05-17_17-50-04/esp32c3.512safe.test_wear_levelling/dut.txt">/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pytest_embedded/dut.py:76: in wrapper
@@ -128,7 +128,7 @@ E   pexpect.exceptions.TIMEOUT: Not found "re.compile(b'^[-]+\\s*(\\d+) Tests (\\d+) Failures (\\d+) Ignored\\s*(?P<result>OK|FAIL)', re.MULTILINE)"
 E   Bytes in current buffer (color code eliminated): Serial port /dev/ttyUSB16 Connecting.... Connecting.... esptool.py v4.7.0 Found 1 serial ports Chip is ESP32-C3 (QFN32) (revision v0.3) Features: WiFi, BLE, Embedded Flash 4MB... (total 6673 bytes)
 E   Please check the full log here: /builds/espressif/esp-idf/pytest_embedded/2024-05-17_17-50-04/esp32c3.512safe.test_wear_levelling/dut.txt</failure>
 </testcase>
-<testcase classname="components.wear_levelling.test_apps.pytest_wear_levelling" file="components/wear_levelling/test_apps/pytest_wear_levelling.py" line="7" name="esp32c3.release.test_wear_levelling" time="70.304">
+<testcase classname="components.wear_levelling.test_apps.pytest_wear_levelling" file="components/wear_levelling/test_apps/pytest_wear_levelling.py" line="7" name="esp32c3.release.test_wear_levelling" dut_log_url="https://url/esp/esp-idf/pytest-embedded/2024-07-01_10-53-05-207900/esp32c3.release.test_wear_levelling/dut.txt" time="70.304">
 <failure message="pexpect.exceptions.TIMEOUT: Not found "re.compile(b'^[-]+\\s*(\\d+) Tests (\\d+) Failures (\\d+) Ignored\\s*(?P<result>OK|FAIL)', re.MULTILINE)"
 Bytes in current buffer (color code eliminated): Serial port /dev/ttyUSB16 Connecting.... Connecting.... esptool.py v4.7.0 Found 1 serial ports Chip is ESP32-C3 (QFN32) (revision v0.3) Features: WiFi, BLE, Embedded Flash 4MB... (total 24528 bytes)
 Please check the full log here: /builds/espressif/esp-idf/pytest_embedded/2024-05-17_17-50-04/esp32c3.release.test_wear_levelling/dut.txt">/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pytest_embedded/dut.py:76: in wrapper
@@ -161,7 +161,7 @@ E   pexpect.exceptions.TIMEOUT: Not found "re.compile(b'^[-]+\\s*(\\d+) Tests (\\d+) Failures (\\d+) Ignored\\s*(?P<result>OK|FAIL)', re.MULTILINE)"
 E   Bytes in current buffer (color code eliminated): Serial port /dev/ttyUSB16 Connecting.... Connecting.... esptool.py v4.7.0 Found 1 serial ports Chip is ESP32-C3 (QFN32) (revision v0.3) Features: WiFi, BLE, Embedded Flash 4MB... (total 24528 bytes)
 E   Please check the full log here: /builds/espressif/esp-idf/pytest_embedded/2024-05-17_17-50-04/esp32c3.release.test_wear_levelling/dut.txt</failure>
 </testcase>
-<testcase classname="components.wpa_supplicant.test_apps.pytest_wpa_supplicant_ut" file="components/wpa_supplicant/test_apps/pytest_wpa_supplicant_ut.py" line="8" name="esp32c3.default.test_wpa_supplicant_ut" time="51.208">
+<testcase classname="components.wpa_supplicant.test_apps.pytest_wpa_supplicant_ut" file="components/wpa_supplicant/test_apps/pytest_wpa_supplicant_ut.py" line="8" name="esp32c3.default.test_wpa_supplicant_ut" dut_log_url="https://url/esp/esp-idf/pytest-embedded/2024-07-01_10-53-05-207900/esp32c3.default.test_wpa_supplicant_ut/dut.txt" time="51.208">
 <failure message="pexpect.exceptions.TIMEOUT: Not found "Press ENTER to see the list of tests"
 Bytes in current buffer (color code eliminated): 0 d4 000 00x0000 x0000x00 000000 0
 Please check the full log here: /builds/espressif/esp-idf/pytest_embedded/2024-05-17_17-50-04/esp32c3.default.test_wpa_supplicant_ut/dut.txt">/root/.espressif/python_env/idf5.2_py3.9_env/lib/python3.9/site-packages/pytest_embedded/dut.py:76: in wrapper
|
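These XUnit fixtures only gain the new dut_log_url attribute on the failed <testcase> elements. As a quick illustration (not part of the commit; the file name below is hypothetical but matches the XUNIT_*.xml sample-report pattern used by the tests), the attribute can be read back with the standard library:

import xml.etree.ElementTree as ET

# Hypothetical fixture path, for illustration only.
tree = ET.parse('XUNIT_TARGET_TEST_REPORT.xml')

for case in tree.getroot().iter('testcase'):
    if case.find('failure') is not None:
        # dut_log_url is the newly added attribute; fall back to a placeholder when absent.
        print(case.get('name'), case.get('dut_log_url', 'no dut_log_url attribute'))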
@@ -5,22 +5,34 @@
<title>Job Report</title>
<link href="https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/css/bootstrap.min.css" rel="stylesheet" />
<link href="https://unpkg.com/bootstrap-table@1.22.1/dist/bootstrap-table.min.css" rel="stylesheet" />
+<link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/bootstrap-table@1.23.0/dist/extensions/sticky-header/bootstrap-table-sticky-header.css">
+<link href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.0.0-beta3/css/all.min.css" rel="stylesheet">
<style>
.text-toggle, .full-text { cursor: pointer; }
th:nth-child(1), td:nth-child(1) { width: 5%; }
th:nth-child(2), td:nth-child(2),
th:nth-child(3), td:nth-child(3) { width: 30%; }
th, td {
overflow: hidden;
text-overflow: ellipsis;
}
h2 {
margin-top: 10px;
}
+.copy-link-icon {
+font-size: 20px;
+margin-left: 10px;
+color: #8f8f97;
+cursor: pointer;
+}
+.copy-link-icon:hover {
+color: #282b2c;
+}
</style>
</head>
<body>
-<div class="container-fluid"><h2>Failed Jobs (Excludes "integration_test" and "target_test" jobs)</h2><table data-toggle="table" data-search="true">
+<div class="container-fluid"><h2 id="failed-jobs">Failed Jobs (Excludes "integration_test" and "target_test" jobs)<i class="fas fa-link copy-link-icon"
+onclick="copyPermalink('#failed-jobs')"></i></h2><table data-toggle="table" data-search="true" data-sticky-header="true">
<thead>
<tr>
<th>Job Name</th>
@@ -61,8 +73,29 @@
<script src="https://cdn.jsdelivr.net/npm/jquery/dist/jquery.min.js"></script>
<script src="https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js"></script>
<script src="https://unpkg.com/bootstrap-table@1.22.1/dist/bootstrap-table.min.js"></script>
+<script src="https://cdn.jsdelivr.net/npm/bootstrap-table@1.23.0/dist/extensions/sticky-header/bootstrap-table-sticky-header.min.js"></script>
<script>
-$(document).ready(function() {
+$(window).on('load', function() {
+var hash = window.location.hash;
+if (hash) {
+setTimeout(function() {
+$('html, body').animate({ scrollTop: $(hash).offset().top }, 100);
+}, 100);
+}
+});
+</script>
+<script>
+function copyPermalink(anchorId) {
+const fullUrl = window.location.origin + window.location.pathname + anchorId;
+history.pushState(null, null, anchorId);
+navigator.clipboard.writeText(fullUrl)
+setTimeout(function() {
+$('html, body').animate({ scrollTop: $(anchorId).offset().top }, 100);
+}, 100);
+}
+</script>
+<script>
+$(document).ready(function() {
$('table.table td').each(function() {
var cell = $(this);
if (cell.text().length > 100) {
@@ -72,7 +105,6 @@
cell.append('<a href="#" class="toggle-link">Show More</a>');
}
});
-
$('body').on('click', '.toggle-link', function(e) {
e.preventDefault();
var link = $(this);
@@ -88,7 +120,7 @@
toggleSpan.hide();
}
});
});
</script>
</body>
</html>

@@ -5,28 +5,41 @@
<title>Test Report</title>
<link href="https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/css/bootstrap.min.css" rel="stylesheet" />
<link href="https://unpkg.com/bootstrap-table@1.22.1/dist/bootstrap-table.min.css" rel="stylesheet" />
+<link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/bootstrap-table@1.23.0/dist/extensions/sticky-header/bootstrap-table-sticky-header.css">
+<link href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.0.0-beta3/css/all.min.css" rel="stylesheet">
<style>
.text-toggle, .full-text { cursor: pointer; }
th:nth-child(1), td:nth-child(1) { width: 5%; }
th:nth-child(2), td:nth-child(2),
th:nth-child(3), td:nth-child(3) { width: 30%; }
th, td {
overflow: hidden;
text-overflow: ellipsis;
}
h2 {
margin-top: 10px;
}
+.copy-link-icon {
+font-size: 20px;
+margin-left: 10px;
+color: #8f8f97;
+cursor: pointer;
+}
+.copy-link-icon:hover {
+color: #282b2c;
+}
</style>
</head>
<body>
-<div class="container-fluid"><h2>Failed Test Cases on Your branch (Excludes Known Failure Cases)</h2><table data-toggle="table" data-search="true">
+<div class="container-fluid"><h2 id="failed-test-cases-on-other-branches">Failed Test Cases on Other branches (Excludes Known Failure Cases)<i class="fas fa-link copy-link-icon"
+onclick="copyPermalink('#failed-test-cases-on-other-branches')"></i></h2><table data-toggle="table" data-search="true" data-sticky-header="true">
<thead>
<tr>
<th>Test Case</th>
<th>Test Script File Path</th>
<th>Failure Reason</th>
-<th>Failures across all other branches (20 latest testcases)</th>
+<th>Failures across all other branches (40 latest testcases)</th>
+<th>Dut Log URL</th>
<th>Job URL</th>
<th>Grafana URL</th>
</tr>
@@ -36,7 +49,8 @@
<td>('esp32h2', 'esp32h2').('defaults', 'defaults').test_i2c_multi_device</td>
<td>components/driver/test_apps/i2c_test_apps/pytest_i2c.py</td>
<td>failed on setup with "EOFError"</td>
-<td>0 / 20</td>
+<td>0 / 40</td>
+<td><a href="https://url/esp/esp-idf/pytest-embedded/2024-07-01_10-53-05-207900/('esp32h2', 'esp32h2').('defaults', 'defaults').test_i2c_multi_device/dut.txt">link</a></td>
<td></td>
<td><a href="https://test_dashboard_host/d/Ucg477Fnz/case-list?var-case_id=%28%27esp32h2%27%2C%20%27esp32h2%27%29.%28%27defaults%27%2C%20%27defaults%27%29.test_i2c_multi_device">link</a></td>
</tr>
@@ -44,7 +58,8 @@
<td>esp32c3.release.test_esp_timer</td>
<td>components/esp_timer/test_apps/pytest_esp_timer_ut.py</td>
<td>pexpect.exceptions.TIMEOUT: Not found "Press ENTER to see the list of tests" Bytes in current buffer (color code eliminated): ce710,len:0x2afc entry 0x403cc710 Please check the full log here: /builds/espressif/esp-idf/pytest_embedded/2024-05-17_17-50-04/esp32c3.release.test_esp_timer/dut.txt</td>
-<td>0 / 20</td>
+<td>0 / 40</td>
+<td><a href="https://url/esp/esp-idf/pytest-embedded/2024-07-01_10-53-05-207900/esp32c3.release.test_esp_timer/dut.txt">link</a></td>
<td></td>
<td><a href="https://test_dashboard_host/d/Ucg477Fnz/case-list?var-case_id=esp32c3.release.test_esp_timer">link</a></td>
</tr>
@@ -52,28 +67,17 @@
<td>esp32c3.default.test_wpa_supplicant_ut</td>
<td>components/wpa_supplicant/test_apps/pytest_wpa_supplicant_ut.py</td>
<td>pexpect.exceptions.TIMEOUT: Not found "Press ENTER to see the list of tests" Bytes in current buffer (color code eliminated): 0 d4 000 00x0000 x0000x00 000000 0 Please check the full log here: /builds/espressif/esp-idf/pytest_embedded/2024-05-17_17-50-04/esp32c3.default.test_wpa_supplicant_ut/dut.txt</td>
-<td>0 / 20</td>
+<td>0 / 40</td>
+<td><a href="https://url/esp/esp-idf/pytest-embedded/2024-07-01_10-53-05-207900/esp32c3.default.test_wpa_supplicant_ut/dut.txt">link</a></td>
<td></td>
<td><a href="https://test_dashboard_host/d/Ucg477Fnz/case-list?var-case_id=esp32c3.default.test_wpa_supplicant_ut">link</a></td>
</tr>
-</tbody>
-</table><h2>Failed Test Cases on Other branches (Excludes Known Failure Cases)</h2><table data-toggle="table" data-search="true">
-<thead>
-<tr>
-<th>Test Case</th>
-<th>Test Script File Path</th>
-<th>Failure Reason</th>
-<th>Failures across all other branches (20 latest testcases)</th>
-<th>Job URL</th>
-<th>Grafana URL</th>
-</tr>
-</thead>
-<tbody>
<tr>
<td>('esp32h2', 'esp32h2').('default', 'default').test_i2s_multi_dev</td>
<td>components/driver/test_apps/i2s_test_apps/i2s_multi_dev/pytest_i2s_multi_dev.py</td>
<td>failed on setup with "EOFError"</td>
-<td>3 / 20</td>
+<td>3 / 40</td>
+<td><a href="https://url/esp/esp-idf/pytest-embedded/2024-07-01_10-53-05-207900/('esp32h2', 'esp32h2').('default', 'default').test_i2s_multi_dev/dut.txt">link</a></td>
<td></td>
<td><a href="https://test_dashboard_host/d/Ucg477Fnz/case-list?var-case_id=%28%27esp32h2%27%2C%20%27esp32h2%27%29.%28%27default%27%2C%20%27default%27%29.test_i2s_multi_dev">link</a></td>
</tr>
@@ -81,7 +85,8 @@
<td>esp32c2.default.test_wpa_supplicant_ut</td>
<td>components/wpa_supplicant/test_apps/pytest_wpa_supplicant_ut.py</td>
<td>AssertionError: Unity test failed</td>
-<td>3 / 20</td>
+<td>3 / 40</td>
+<td><a href="https://url/esp/esp-idf/pytest-embedded/2024-07-01_10-53-05-207900/esp32c2.default.test_wpa_supplicant_ut/dut.txt">link</a></td>
<td></td>
<td><a href="https://test_dashboard_host/d/Ucg477Fnz/case-list?var-case_id=esp32c2.default.test_wpa_supplicant_ut">link</a></td>
</tr>
@@ -89,7 +94,8 @@
<td>esp32c3.512safe.test_wear_levelling</td>
<td>components/wear_levelling/test_apps/pytest_wear_levelling.py</td>
<td>pexpect.exceptions.TIMEOUT: Not found "re.compile(b'^[-]+\\s*(\\d+) Tests (\\d+) Failures (\\d+) Ignored\\s*(?P<result>OK|FAIL)', re.MULTILINE)" Bytes in current buffer (color code eliminated): Serial port /dev/ttyUSB16 Connecting.... Connecting.... esptool.py v4.7.0 Found 1 serial ports Chip is ESP32-C3 (QFN32) (revision v0.3) Features: WiFi, BLE, Embedded Flash 4MB... (total 6673 bytes) Please check the full log here: /builds/espressif/esp-idf/pytest_embedded/2024-05-17_17-50-04/esp32c3.512safe.test_wear_levelling/dut.txt</td>
-<td>3 / 20</td>
+<td>3 / 40</td>
+<td><a href="https://url/esp/esp-idf/pytest-embedded/2024-07-01_10-53-05-207900/esp32c3.512safe.test_wear_levelling/dut.txt">link</a></td>
<td></td>
<td><a href="https://test_dashboard_host/d/Ucg477Fnz/case-list?var-case_id=esp32c3.512safe.test_wear_levelling">link</a></td>
</tr>
@@ -97,12 +103,14 @@
<td>esp32c3.release.test_wear_levelling</td>
<td>components/wear_levelling/test_apps/pytest_wear_levelling.py</td>
<td>pexpect.exceptions.TIMEOUT: Not found "re.compile(b'^[-]+\\s*(\\d+) Tests (\\d+) Failures (\\d+) Ignored\\s*(?P<result>OK|FAIL)', re.MULTILINE)" Bytes in current buffer (color code eliminated): Serial port /dev/ttyUSB16 Connecting.... Connecting.... esptool.py v4.7.0 Found 1 serial ports Chip is ESP32-C3 (QFN32) (revision v0.3) Features: WiFi, BLE, Embedded Flash 4MB... (total 24528 bytes) Please check the full log here: /builds/espressif/esp-idf/pytest_embedded/2024-05-17_17-50-04/esp32c3.release.test_wear_levelling/dut.txt</td>
-<td>3 / 20</td>
+<td>3 / 40</td>
+<td><a href="https://url/esp/esp-idf/pytest-embedded/2024-07-01_10-53-05-207900/esp32c3.release.test_wear_levelling/dut.txt">link</a></td>
<td></td>
<td><a href="https://test_dashboard_host/d/Ucg477Fnz/case-list?var-case_id=esp32c3.release.test_wear_levelling">link</a></td>
</tr>
</tbody>
-</table><h2>Known Failure Cases</h2><table data-toggle="table" data-search="true">
+</table><h2 id="known-failure-cases">Known Failure Cases<i class="fas fa-link copy-link-icon"
+onclick="copyPermalink('#known-failure-cases')"></i></h2><table data-toggle="table" data-search="true" data-sticky-header="true">
<thead>
<tr>
<th>Test Case</th>
@@ -142,7 +150,8 @@
<td><a href="https://test_dashboard_host/d/Ucg477Fnz/case-list?var-case_id=esp32c3.default.test_wpa_supplicant_ut">link</a></td>
</tr>
</tbody>
-</table><h2>Skipped Test Cases</h2><table data-toggle="table" data-search="true">
+</table><h2 id="skipped-test-cases">Skipped Test Cases<i class="fas fa-link copy-link-icon"
+onclick="copyPermalink('#skipped-test-cases')"></i></h2><table data-toggle="table" data-search="true" data-sticky-header="true">
<thead>
<tr>
<th>Test Case</th>
@@ -159,7 +168,8 @@
<td><a href="https://test_dashboard_host/d/Ucg477Fnz/case-list?var-case_id=test_python_interpreter_win">link</a></td>
</tr>
</tbody>
-</table><h2>Succeeded Test Cases</h2><table data-toggle="table" data-search="true">
+</table><h2 id="succeeded-test-cases">Succeeded Test Cases<i class="fas fa-link copy-link-icon"
+onclick="copyPermalink('#succeeded-test-cases')"></i></h2><table data-toggle="table" data-search="true" data-sticky-header="true">
<thead>
<tr>
<th>Test Case</th>
@@ -228,8 +238,29 @@
<script src="https://cdn.jsdelivr.net/npm/jquery/dist/jquery.min.js"></script>
<script src="https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js"></script>
<script src="https://unpkg.com/bootstrap-table@1.22.1/dist/bootstrap-table.min.js"></script>
+<script src="https://cdn.jsdelivr.net/npm/bootstrap-table@1.23.0/dist/extensions/sticky-header/bootstrap-table-sticky-header.min.js"></script>
<script>
-$(document).ready(function() {
+$(window).on('load', function() {
+var hash = window.location.hash;
+if (hash) {
+setTimeout(function() {
+$('html, body').animate({ scrollTop: $(hash).offset().top }, 100);
+}, 100);
+}
+});
+</script>
+<script>
+function copyPermalink(anchorId) {
+const fullUrl = window.location.origin + window.location.pathname + anchorId;
+history.pushState(null, null, anchorId);
+navigator.clipboard.writeText(fullUrl)
+setTimeout(function() {
+$('html, body').animate({ scrollTop: $(anchorId).offset().top }, 100);
+}, 100);
+}
+</script>
+<script>
+$(document).ready(function() {
$('table.table td').each(function() {
var cell = $(this);
if (cell.text().length > 100) {
@@ -239,7 +270,6 @@
cell.append('<a href="#" class="toggle-link">Show More</a>');
}
});
-
$('body').on('click', '.toggle-link', function(e) {
e.preventDefault();
var link = $(this);
@@ -255,7 +285,7 @@
toggleSpan.hide();
}
});
});
</script>
</body>
</html>

@@ -46,6 +46,17 @@ class TestReportGeneration(unittest.TestCase):
self.addCleanup(self.gitlab_patcher.stop)
self.addCleanup(self.env_patcher.stop)
self.addCleanup(self.failure_rate_patcher.stop)
+self.addCleanup(self.cleanup_files)
+
+def cleanup_files(self) -> None:
+files_to_delete = [
+self.target_test_report_generator.skipped_test_cases_report_file,
+self.target_test_report_generator.succeeded_cases_report_file,
+self.target_test_report_generator.failed_cases_report_file,
+]
+for file_path in files_to_delete:
+if os.path.exists(file_path):
+os.remove(file_path)

def load_test_and_job_reports(self) -> None:
self.expected_target_test_report_html = load_file(
@@ -62,9 +73,23 @@ class TestReportGeneration(unittest.TestCase):
jobs = [GitlabJob.from_json_data(job_json, failure_rates.get(job_json['name'], {})) for job_json in json.loads(jobs_response_raw)['jobs']]
test_cases = parse_testcases_from_filepattern(os.path.join(self.reports_sample_data_path, 'XUNIT_*.xml'))
self.target_test_report_generator = TargetTestReportGenerator(
-project_id=123, mr_iid=1, pipeline_id=456, title='Test Report', test_cases=test_cases)
+project_id=123,
+mr_iid=1,
+pipeline_id=456,
+job_id=0,
+commit_id='cccc',
+title='Test Report',
+test_cases=test_cases
+)
self.job_report_generator = JobReportGenerator(
-project_id=123, mr_iid=1, pipeline_id=456, title='Job Report', jobs=jobs)
+project_id=123,
+mr_iid=1,
+pipeline_id=456,
+job_id=0,
+commit_id='cccc',
+title='Job Report',
+jobs=jobs
+)
self.target_test_report_generator._known_failure_cases_set = {
'*.test_wpa_supplicant_ut',
'esp32c3.release.test_esp_timer',
@@ -72,7 +97,7 @@ class TestReportGeneration(unittest.TestCase):
}
test_cases_failed = [tc for tc in test_cases if tc.is_failure]
for index, tc in enumerate(test_cases_failed):
-tc.latest_total_count = 20
+tc.latest_total_count = 40
if index % 3 == 0:
tc.latest_failed_count = 0
else:

@@ -66,10 +66,14 @@ def load_known_failure_cases() -> t.Optional[t.Set[str]]:
if not known_failures_file:
return None
try:
-with open(known_failures_file) as f:
+with open(known_failures_file, 'r') as f:
file_content = f.read()
-known_cases_list = re.sub(re.compile('#.*\n'), '', file_content).split()
-return {case.strip() for case in known_cases_list}
+pattern = re.compile(r'^(.*?)\s+#\s+([A-Z]+)-\d+', re.MULTILINE)
+matches = pattern.findall(file_content)
+
+known_cases_list = [match[0].strip() for match in matches]
+return set(known_cases_list)
except FileNotFoundError:
return None

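Worth noting: the rewritten parser above only accepts known-failure entries that carry an uppercase ticket reference (for example SOMETHING-123) after a #; bare case names or plain comments no longer match. A small self-contained example of what the new pattern extracts (the case names are taken from this change, the ticket IDs are invented):

import re

# Hypothetical known-failure-cases file content; ticket IDs are made up.
file_content = '''\
*.test_wpa_supplicant_ut  # IDFCI-100 wifi lib issue
esp32c3.release.test_esp_timer  # IDFCI-200
some_case_without_a_ticket
'''

pattern = re.compile(r'^(.*?)\s+#\s+([A-Z]+)-\d+', re.MULTILINE)
known_cases = {match[0].strip() for match in pattern.findall(file_content)}

# The third line has no "# TICKET-123" suffix, so it is not picked up.
assert known_cases == {'*.test_wpa_supplicant_ut', 'esp32c3.release.test_esp_timer'}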
@@ -111,7 +115,7 @@ def fetch_failed_jobs(commit_id: str) -> t.List[GitlabJob]:
response = requests.post(
f'{ci_dash_api_backend_host}/jobs/failure_ratio',
headers={'Authorization': f'Bearer {token}'},
-json={'job_names': failed_job_names, 'exclude_branches': [os.getenv('CI_COMMIT_BRANCH', '')]},
+json={'job_names': failed_job_names, 'exclude_branches': [os.getenv('CI_MERGE_REQUEST_SOURCE_BRANCH_NAME', '')]},
)
if response.status_code != 200:
print(f'Failed to fetch jobs failure rate data: {response.status_code} with error: {response.text}')
@@ -128,20 +132,20 @@ def fetch_failed_jobs(commit_id: str) -> t.List[GitlabJob]:
return combined_jobs


-def fetch_failed_testcases_failure_ratio(failed_testcases: t.List[TestCase]) -> t.List[TestCase]:
+def fetch_failed_testcases_failure_ratio(failed_testcases: t.List[TestCase], branches_filter: dict) -> t.List[TestCase]:
"""
Fetches info about failure rates of testcases using an API request to ci-dashboard-api.
:param failed_testcases: The list of failed testcases models.
+:param branches_filter: The filter to filter testcases by branch names.
:return: A list of testcases with enriched with failure rates data.
"""
token = os.getenv('ESPCI_TOKEN', '')
ci_dash_api_backend_host = os.getenv('CI_DASHBOARD_API', '')
+req_json = {'testcase_names': list(set([testcase.name for testcase in failed_testcases])), **branches_filter}
response = requests.post(
f'{ci_dash_api_backend_host}/testcases/failure_ratio',
headers={'Authorization': f'Bearer {token}'},
-json={'testcase_names': [testcase.name for testcase in failed_testcases],
-'exclude_branches': [os.getenv('CI_COMMIT_BRANCH', '')],
-},
+json=req_json,
)
if response.status_code != 200:
print(f'Failed to fetch testcases failure rate data: {response.status_code} with error: {response.text}')
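The failure-ratio request now leaves branch selection to the caller via branches_filter, and the jobs request above switches from CI_COMMIT_BRANCH to CI_MERGE_REQUEST_SOURCE_BRANCH_NAME. A hedged sketch of how a caller might build that argument; build_other_branches_filter is a hypothetical helper, only the 'exclude_branches' key is taken from this diff, and any other keys the ci-dashboard-api may accept are not shown here:

import os


def build_other_branches_filter() -> dict:
    # Hypothetical helper: exclude the current MR source branch so the
    # returned ratios describe failures on all *other* branches.
    cur_branch = os.getenv('CI_MERGE_REQUEST_SOURCE_BRANCH_NAME', '')
    return {'exclude_branches': [cur_branch]}


# failed_cases = fetch_failed_testcases_failure_ratio(failed_cases, build_other_branches_filter())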
@@ -166,3 +170,34 @@ def load_file(file_path: str) -> str:
"""
with open(file_path, 'r') as file:
return file.read()
+
+
+def format_permalink(s: str) -> str:
+"""
+Formats a given string into a permalink.
+
+:param s: The string to be formatted into a permalink.
+:return: The formatted permalink as a string.
+"""
+end_index = s.find('(')
+
+if end_index != -1:
+trimmed_string = s[:end_index].strip()
+else:
+trimmed_string = s.strip()
+
+formatted_string = trimmed_string.lower().replace(' ', '-')
+
+return formatted_string
+
+
+def get_report_url(job_id: int, output_filepath: str) -> str:
+"""
+Generates the url of the path where the report will be stored in the job's artifacts.
+
+:param job_id: The job identifier used to construct the URL.
+:param output_filepath: The path to the output file.
+:return: The modified URL pointing to the job's artifacts.
+"""
+url = os.getenv('CI_PAGES_URL', '').replace('esp-idf', '-/esp-idf')
+return f'{url}/-/jobs/{job_id}/artifacts/{output_filepath}'
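To make the two new helpers concrete, here is a small self-contained sketch that mirrors their logic (it is not the module itself) and shows the values they produce for titles that appear in the expected report HTML above; the CI_PAGES_URL value, job id, and output file name are invented for illustration:

import os


def format_permalink(s: str) -> str:
    # Mirrors the helper added above: drop everything from the first '(' on,
    # then lower-case and hyphenate the remaining title.
    end_index = s.find('(')
    trimmed = s[:end_index].strip() if end_index != -1 else s.strip()
    return trimmed.lower().replace(' ', '-')


def get_report_url(job_id: int, output_filepath: str) -> str:
    # Mirrors the helper added above; CI_PAGES_URL is provided by GitLab CI.
    url = os.getenv('CI_PAGES_URL', '').replace('esp-idf', '-/esp-idf')
    return f'{url}/-/jobs/{job_id}/artifacts/{output_filepath}'


# These anchors match the h2 ids used in the expected report HTML above.
assert format_permalink('Known Failure Cases') == 'known-failure-cases'
assert format_permalink('Failed Jobs (Excludes "integration_test" and "target_test" jobs)') == 'failed-jobs'

# Invented environment value, job id, and file name, purely for illustration.
os.environ['CI_PAGES_URL'] = 'https://espressif.example.io/esp-idf'
print(get_report_url(42, 'report.html'))
# -> https://espressif.example.io/-/esp-idf/-/jobs/42/artifacts/report.html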