Mirror of https://github.com/espressif/esp-idf.git
style: format python files with isort and double-quote-string-fixer
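For orientation, here is a minimal before/after sketch (illustrative only, not part of the diff) of what the two formatters named in the commit title do, mirroring the first hunk below: isort alphabetizes imports and separates the standard library from third-party packages such as yaml, while double-quote-string-fixer rewrites double-quoted string literals to single quotes.

    # before: stdlib imports unsorted, double-quoted literal
    import os
    import re
    import json

    SORT_KEYS = ["env_tag"]

    # after isort + double-quote-string-fixer
    import json
    import os
    import re

    import yaml

    SORT_KEYS = ['env_tag']
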
@@ -39,9 +39,9 @@ The Basic logic to assign test cases is as follow:

 """

+import json
 import os
 import re
-import json

 import yaml

@@ -50,13 +50,13 @@ try:
 except ImportError:
     from yaml import Loader as Loader

-from . import (CaseConfig, SearchCases, GitlabCIJob, console_log)
+from . import CaseConfig, GitlabCIJob, SearchCases, console_log


 class Group(object):
     MAX_EXECUTION_TIME = 30
     MAX_CASE = 15
-    SORT_KEYS = ["env_tag"]
+    SORT_KEYS = ['env_tag']
     # Matching CI job rules could be different from the way we want to group test cases.
     # For example, when assign unit test cases, different test cases need to use different test functions.
     # We need to put them into different groups.
@@ -92,7 +92,7 @@ class Group(object):

         :return: True or False
         """
-        max_time = (sum([self._get_case_attr(x, "execution_time") for x in self.case_list])
+        max_time = (sum([self._get_case_attr(x, 'execution_time') for x in self.case_list])
                     < self.MAX_EXECUTION_TIME)
         max_case = (len(self.case_list) < self.MAX_CASE)
         return max_time and max_case
@@ -135,8 +135,8 @@ class Group(object):
         :return: {"Filter": case filter, "CaseConfig": list of case configs for cases in this group}
         """
         output_data = {
-            "Filter": self.filters,
-            "CaseConfig": [{"name": self._get_case_attr(x, "name")} for x in self.case_list],
+            'Filter': self.filters,
+            'CaseConfig': [{'name': self._get_case_attr(x, 'name')} for x in self.case_list],
         }
         return output_data

@@ -149,12 +149,12 @@ class AssignTest(object):
     :param ci_config_file: path of ``.gitlab-ci.yml``
     """
     # subclass need to rewrite CI test job pattern, to filter all test jobs
-    CI_TEST_JOB_PATTERN = re.compile(r"^test_.+")
+    CI_TEST_JOB_PATTERN = re.compile(r'^test_.+')
     # by default we only run function in CI, as other tests could take long time
     DEFAULT_FILTER = {
-        "category": "function",
-        "ignore": False,
-        "supported_in_ci": True,
+        'category': 'function',
+        'ignore': False,
+        'supported_in_ci': True,
     }

     def __init__(self, test_case_paths, ci_config_file, case_group=Group):
@@ -168,25 +168,25 @@ class AssignTest(object):
     def _handle_parallel_attribute(job_name, job):
         jobs_out = []
         try:
-            for i in range(job["parallel"]):
-                jobs_out.append(GitlabCIJob.Job(job, job_name + "_{}".format(i + 1)))
+            for i in range(job['parallel']):
+                jobs_out.append(GitlabCIJob.Job(job, job_name + '_{}'.format(i + 1)))
         except KeyError:
             # Gitlab don't allow to set parallel to 1.
             # to make test job name same ($CI_JOB_NAME_$CI_NODE_INDEX),
             # we append "_" to jobs don't have parallel attribute
-            jobs_out.append(GitlabCIJob.Job(job, job_name + "_"))
+            jobs_out.append(GitlabCIJob.Job(job, job_name + '_'))
         return jobs_out

     def _parse_gitlab_ci_config(self, ci_config_file):

-        with open(ci_config_file, "r") as f:
+        with open(ci_config_file, 'r') as f:
             ci_config = yaml.load(f, Loader=Loader)

         job_list = list()
         for job_name in ci_config:
             if self.CI_TEST_JOB_PATTERN.search(job_name) is not None:
                 job_list.extend(self._handle_parallel_attribute(job_name, ci_config[job_name]))
-        job_list.sort(key=lambda x: x["name"])
+        job_list.sort(key=lambda x: x['name'])
         return job_list

     def search_cases(self, case_filter=None):
@@ -256,7 +256,7 @@ class AssignTest(object):
         Bot could also pass test count.
         If filtered cases need to be tested for several times, then we do duplicate them here.
         """
-        test_count = os.getenv("BOT_TEST_COUNT")
+        test_count = os.getenv('BOT_TEST_COUNT')
         if test_count:
             test_count = int(test_count)
             self.test_cases *= test_count
@@ -269,7 +269,7 @@ class AssignTest(object):
         """
         group_count = dict()
         for group in test_groups:
-            key = ",".join(group.ci_job_match_keys)
+            key = ','.join(group.ci_job_match_keys)
             try:
                 group_count[key] += 1
             except KeyError:
@@ -305,26 +305,26 @@ class AssignTest(object):
         # print debug info
         # total requirement of current pipeline
         required_group_count = self._count_groups_by_keys(test_groups)
-        console_log("Required job count by tags:")
+        console_log('Required job count by tags:')
         for tags in required_group_count:
-            console_log("\t{}: {}".format(tags, required_group_count[tags]))
+            console_log('\t{}: {}'.format(tags, required_group_count[tags]))

         # number of unused jobs
-        not_used_jobs = [job for job in self.jobs if "case group" not in job]
+        not_used_jobs = [job for job in self.jobs if 'case group' not in job]
         if not_used_jobs:
-            console_log("{} jobs not used. Please check if you define too much jobs".format(len(not_used_jobs)), "O")
+            console_log('{} jobs not used. Please check if you define too much jobs'.format(len(not_used_jobs)), 'O')
             for job in not_used_jobs:
-                console_log("\t{}".format(job["name"]), "O")
+                console_log('\t{}'.format(job['name']), 'O')

         # failures
         if failed_to_assign:
-            console_log("Too many test cases vs jobs to run. "
-                        "Please increase parallel count in tools/ci/config/target-test.yml "
-                        "for jobs with specific tags:", "R")
+            console_log('Too many test cases vs jobs to run. '
+                        'Please increase parallel count in tools/ci/config/target-test.yml '
+                        'for jobs with specific tags:', 'R')
             failed_group_count = self._count_groups_by_keys(failed_to_assign)
             for tags in failed_group_count:
-                console_log("\t{}: {}".format(tags, failed_group_count[tags]), "R")
-            raise RuntimeError("Failed to assign test case to CI jobs")
+                console_log('\t{}: {}'.format(tags, failed_group_count[tags]), 'R')
+            raise RuntimeError('Failed to assign test case to CI jobs')

     def output_configs(self, output_path):
         """

@@ -141,9 +141,9 @@ def filter_test_cases(test_methods, case_filter):

 class Parser(object):
     DEFAULT_CONFIG = {
-        "TestConfig": dict(),
-        "Filter": dict(),
-        "CaseConfig": [{"extra_data": None}],
+        'TestConfig': dict(),
+        'Filter': dict(),
+        'CaseConfig': [{'extra_data': None}],
     }

     @classmethod
@@ -156,7 +156,7 @@ class Parser(object):
         """
         configs = cls.DEFAULT_CONFIG.copy()
         if config_file:
-            with open(config_file, "r") as f:
+            with open(config_file, 'r') as f:
                 configs.update(yaml.load(f, Loader=Loader))
         return configs

@@ -170,8 +170,8 @@ class Parser(object):
         """
         output = dict()
         for key in overwrite:
-            module = importlib.import_module(overwrite[key]["package"])
-            output[key] = module.__getattribute__(overwrite[key]["class"])
+            module = importlib.import_module(overwrite[key]['package'])
+            output[key] = module.__getattribute__(overwrite[key]['class'])
         return output

     @classmethod
@@ -185,10 +185,10 @@ class Parser(object):
         """
         configs = cls.parse_config_file(config_file)
         test_case_list = []
-        for _config in configs["CaseConfig"]:
-            _filter = configs["Filter"].copy()
-            _overwrite = cls.handle_overwrite_args(_config.pop("overwrite", dict()))
-            _extra_data = _config.pop("extra_data", None)
+        for _config in configs['CaseConfig']:
+            _filter = configs['Filter'].copy()
+            _overwrite = cls.handle_overwrite_args(_config.pop('overwrite', dict()))
+            _extra_data = _config.pop('extra_data', None)
             _filter.update(_config)

             # Try get target from yml
@@ -222,8 +222,8 @@ class Generator(object):

     def __init__(self):
         self.default_config = {
-            "TestConfig": dict(),
-            "Filter": dict(),
+            'TestConfig': dict(),
+            'Filter': dict(),
         }

     def set_default_configs(self, test_config, case_filter):
@@ -232,7 +232,7 @@ class Generator(object):
         :param case_filter: "Filter" value
         :return: None
         """
-        self.default_config = {"TestConfig": test_config, "Filter": case_filter}
+        self.default_config = {'TestConfig': test_config, 'Filter': case_filter}

     def generate_config(self, case_configs, output_file):
         """
@@ -241,6 +241,6 @@ class Generator(object):
         :return: None
         """
         config = self.default_config.copy()
-        config.update({"CaseConfig": case_configs})
-        with open(output_file, "w") as f:
+        config.update({'CaseConfig': case_configs})
+        with open(output_file, 'w') as f:
             yaml.dump(config, f)

@@ -26,8 +26,8 @@ class Job(dict):
     """
     def __init__(self, job, job_name):
         super(Job, self).__init__(job)
-        self["name"] = job_name
-        self.tags = set(self["tags"])
+        self['name'] = job_name
+        self.tags = set(self['tags'])

     def match_group(self, group):
         """
@@ -38,7 +38,7 @@ class Job(dict):
         :return: True or False
         """
         match_result = False
-        if "case group" not in self and group.ci_job_match_keys == self.tags:
+        if 'case group' not in self and group.ci_job_match_keys == self.tags:
             # group not assigned and all tags match
             match_result = True
         return match_result
@@ -49,7 +49,7 @@ class Job(dict):

         :param group: the case group to assign
         """
-        self["case group"] = group
+        self['case group'] = group

     def output_config(self, file_path):
         """
@@ -59,7 +59,7 @@ class Job(dict):
         :param file_path: output file path
         :return: None
         """
-        file_name = os.path.join(file_path, self["name"] + ".yml")
-        if "case group" in self:
-            with open(file_name, "w") as f:
-                yaml.safe_dump(self["case group"].output(), f, encoding='utf-8', default_flow_style=False)
+        file_name = os.path.join(file_path, self['name'] + '.yml')
+        if 'case group' in self:
+            with open(file_name, 'w') as f:
+                yaml.safe_dump(self['case group'].output(), f, encoding='utf-8', default_flow_style=False)

@@ -13,23 +13,23 @@
 # limitations under the License.

 """ search test cases from a given file or path """
-import os
-import fnmatch
-import types
 import copy
+import fnmatch
+import os
+import types

 from . import load_source


 class Search(object):
-    TEST_CASE_FILE_PATTERN = "*_test.py"
+    TEST_CASE_FILE_PATTERN = '*_test.py'
     SUPPORT_REPLICATE_CASES_KEY = ['target']

     @classmethod
     def _search_cases_from_file(cls, file_name):
         """ get test cases from test case .py file """

-        print("Try to get cases from: " + file_name)
+        print('Try to get cases from: ' + file_name)
         test_functions = []
         try:
             mod = load_source(file_name)
@@ -42,14 +42,14 @@ class Search(object):
                 except AttributeError:
                     continue
         except ImportError as e:
-            print("ImportError: \r\n\tFile:" + file_name + "\r\n\tError:" + str(e))
+            print('ImportError: \r\n\tFile:' + file_name + '\r\n\tError:' + str(e))

         test_functions_out = []
         for case in test_functions:
             test_functions_out += cls.replicate_case(case)

         for i, test_function in enumerate(test_functions_out):
-            print("\t{}. {} <{}>".format(i + 1, test_function.case_info["name"], test_function.case_info["target"]))
+            print('\t{}. {} <{}>'.format(i + 1, test_function.case_info['name'], test_function.case_info['target']))
             test_function.case_info['app_dir'] = os.path.dirname(file_name)
         return test_functions_out

@@ -58,7 +58,7 @@ class Search(object):
         """ search all test case files recursively of a path """

         if not os.path.exists(test_case):
-            raise OSError("test case path not exist")
+            raise OSError('test case path not exist')
         if os.path.isdir(test_case):
             test_case_files = []
             for root, _, file_names in os.walk(test_case):

@@ -1,4 +1,5 @@
 from __future__ import print_function
+
 import os.path
 import sys
 import time
@@ -7,35 +8,35 @@ import traceback
 from .. import Env

 _COLOR_CODES = {
-    "white": u'\033[0m',
-    "red": u'\033[31m',
-    "green": u'\033[32m',
-    "orange": u'\033[33m',
-    "blue": u'\033[34m',
-    "purple": u'\033[35m',
-    "W": u'\033[0m',
-    "R": u'\033[31m',
-    "G": u'\033[32m',
-    "O": u'\033[33m',
-    "B": u'\033[34m',
-    "P": u'\033[35m'
+    'white': u'\033[0m',
+    'red': u'\033[31m',
+    'green': u'\033[32m',
+    'orange': u'\033[33m',
+    'blue': u'\033[34m',
+    'purple': u'\033[35m',
+    'W': u'\033[0m',
+    'R': u'\033[31m',
+    'G': u'\033[32m',
+    'O': u'\033[33m',
+    'B': u'\033[34m',
+    'P': u'\033[35m'
 }


 def _get_log_file_name():
     if Env.Env.CURRENT_LOG_FOLDER:
-        file_name = os.path.join(Env.Env.CURRENT_LOG_FOLDER, "console.log")
+        file_name = os.path.join(Env.Env.CURRENT_LOG_FOLDER, 'console.log')
     else:
-        raise OSError("env log folder does not exist, will not save to log file")
+        raise OSError('env log folder does not exist, will not save to log file')
     return file_name


 def format_timestamp():
     ts = time.time()
-    return "{}:{}".format(time.strftime("%m-%d %H:%M:%S", time.localtime(ts)), str(ts % 1)[2:5])
+    return '{}:{}'.format(time.strftime('%m-%d %H:%M:%S', time.localtime(ts)), str(ts % 1)[2:5])


-def console_log(data, color="white", end="\n"):
+def console_log(data, color='white', end='\n'):
     """
     log data to console.
     (if not flush console log, Gitlab-CI won't update logs during job execution)
@@ -44,19 +45,19 @@ def console_log(data, color="white", end="\n"):
     :param color: color
     """
     if color not in _COLOR_CODES:
-        color = "white"
+        color = 'white'
     color_codes = _COLOR_CODES[color]
     if isinstance(data, type(b'')):
         data = data.decode('utf-8', 'replace')
     print(color_codes + data, end=end)
-    if color not in ["white", "W"]:
+    if color not in ['white', 'W']:
         # reset color to white for later logs
-        print(_COLOR_CODES["white"] + u"\r")
+        print(_COLOR_CODES['white'] + u'\r')
     sys.stdout.flush()
-    log_data = "[{}] ".format(format_timestamp()) + data
+    log_data = '[{}] '.format(format_timestamp()) + data
     try:
         log_file = _get_log_file_name()
-        with open(log_file, "a+") as f:
+        with open(log_file, 'a+') as f:
             f.write(log_data + end)
     except OSError:
         pass
@@ -108,4 +109,4 @@ def handle_unexpected_exception(junit_test_case, exception):
     traceback.print_exc()
     # AssertionError caused by an 'assert' statement has an empty string as its 'str' form
     e_str = str(exception) if str(exception) else repr(exception)
-    junit_test_case.add_failure_info("Unexpected exception: {}\n{}".format(e_str, traceback.format_exc()))
+    junit_test_case.add_failure_info('Unexpected exception: {}\n{}'.format(e_str, traceback.format_exc()))
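
One edge case worth noting, hedged and not visible in this diff: as documented for the upstream pre-commit hook, double-quote-string-fixer only switches a literal to single quotes when that does not force new escaping, so a string containing an apostrophe keeps its double quotes. A small illustrative sketch:

    name = "env_tag"       # rewritten to: name = 'env_tag'
    msg = "don't rewrite"  # left as-is: switching quotes would require escaping the apostrophe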