tiny-test-fw: move to tools/esp_python_packages:

make `tiny_test_fw` a package and move it to the root path of the IDF
python packages
He Yin Ling
2019-11-27 11:21:33 +08:00
parent f5e60524ac
commit d621d0e88e
29 changed files with 37 additions and 43 deletions


@@ -0,0 +1,60 @@
# Copyright 2015-2017 Espressif Systems (Shanghai) PTE LTD
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Command line tool to assign example tests to CI test jobs.
"""
# TODO: Need to handle running examples on different chips
import os
import sys
import re
import argparse
import gitlab_api
from tiny_test_fw.Utility import CIAssignTest
class ExampleGroup(CIAssignTest.Group):
SORT_KEYS = CI_JOB_MATCH_KEYS = ["env_tag", "chip"]
class CIExampleAssignTest(CIAssignTest.AssignTest):
CI_TEST_JOB_PATTERN = re.compile(r"^example_test_.+")
class ArtifactFile(object):
def __init__(self, project_id, job_name, artifact_file_path):
self.gitlab_api = gitlab_api.Gitlab(project_id)
def process(self):
pass
def output(self):
pass
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("test_case",
help="test case folder or file")
parser.add_argument("ci_config_file",
help="gitlab ci config file")
parser.add_argument("output_path",
help="output path of config files")
args = parser.parse_args()
assign_test = CIExampleAssignTest(args.test_case, args.ci_config_file, case_group=ExampleGroup)
assign_test.assign_cases()
assign_test.output_configs(args.output_path)

View File

@@ -0,0 +1,198 @@
"""
Command line tool to assign unit tests to CI test jobs.
"""
import re
import argparse
import yaml
try:
from yaml import CLoader as Loader
except ImportError:
from yaml import Loader as Loader
from tiny_test_fw.Utility import CIAssignTest
class Group(CIAssignTest.Group):
SORT_KEYS = ["test environment", "tags", "chip_target"]
MAX_CASE = 50
ATTR_CONVERT_TABLE = {
"execution_time": "execution time"
}
CI_JOB_MATCH_KEYS = ["test environment"]
DUT_CLS_NAME = {
"esp32": "ESP32DUT",
"esp32s2beta": "ESP32S2DUT",
"esp8266": "ESP8266DUT",
}
def __init__(self, case):
super(Group, self).__init__(case)
for tag in self._get_case_attr(case, "tags"):
self.ci_job_match_keys.add(tag)
@staticmethod
def _get_case_attr(case, attr):
if attr in Group.ATTR_CONVERT_TABLE:
attr = Group.ATTR_CONVERT_TABLE[attr]
return case[attr]
def add_extra_case(self, case):
""" If current group contains all tags required by case, then add succeed """
added = False
if self.accept_new_case():
for key in self.filters:
if self._get_case_attr(case, key) != self.filters[key]:
if key == "tags":
if self._get_case_attr(case, key).issubset(self.filters[key]):
continue
break
else:
self.case_list.append(case)
added = True
return added
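# Illustration (not part of this change): if this group's "tags" filter is
# {"ESP32_IDF", "UT_T1_1"} and a candidate case carries tags {"ESP32_IDF"},
# the subset check above passes and the case can still be merged into this group,
# provided accept_new_case() still allows it.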
def _create_extra_data(self, test_cases, test_function):
"""
For unit test cases, we need to copy some attributes of the test cases into the config file,
so the unit test function knows how to run each case.
"""
case_data = []
for case in test_cases:
one_case_data = {
"config": self._get_case_attr(case, "config"),
"name": self._get_case_attr(case, "summary"),
"reset": self._get_case_attr(case, "reset"),
"timeout": self._get_case_attr(case, "timeout"),
}
if test_function in ["run_multiple_devices_cases", "run_multiple_stage_cases"]:
try:
one_case_data["child case num"] = self._get_case_attr(case, "child case num")
except KeyError as e:
print("multiple devices/stages cases must contains at least two test functions")
print("case name: {}".format(one_case_data["name"]))
raise e
case_data.append(one_case_data)
return case_data
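# Illustration only (values are made up): one entry produced by _create_extra_data()
# for a multi-stage case; "child case num" is only present for multi device/stage cases:
#   {"config": "default", "name": "UART can do select()",
#    "reset": "", "timeout": 30, "child case num": 2}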
def _divide_case_by_test_function(self):
"""
Divide cases of the current test group by the test function they need to use.
:return: dict mapping each test function to its list of cases
"""
case_by_test_function = {
"run_multiple_devices_cases": [],
"run_multiple_stage_cases": [],
"run_unit_test_cases": [],
}
for case in self.case_list:
if case["multi_device"] == "Yes":
case_by_test_function["run_multiple_devices_cases"].append(case)
elif case["multi_stage"] == "Yes":
case_by_test_function["run_multiple_stage_cases"].append(case)
else:
case_by_test_function["run_unit_test_cases"].append(case)
return case_by_test_function
def output(self):
"""
output data for job configs
:return: {"Filter": case filter, "CaseConfig": list of case configs for cases in this group}
"""
target = self._get_case_attr(self.case_list[0], "chip_target")
if target:
overwrite = {
"dut": {
"package": "ttfw_idf",
"class": self.DUT_CLS_NAME[target],
}
}
else:
overwrite = dict()
case_by_test_function = self._divide_case_by_test_function()
output_data = {
# we don't need a filter for the test function, as UT uses only a few test functions for all cases
"CaseConfig": [
{
"name": test_function,
"extra_data": self._create_extra_data(test_cases, test_function),
"overwrite": overwrite,
} for test_function, test_cases in case_by_test_function.items() if test_cases  # .items() works on Python 2 and 3
],
}
return output_data
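# Illustration only (case details are made up): the shape of the data returned by output():
#   {"CaseConfig": [
#       {"name": "run_unit_test_cases",
#        "extra_data": [...],  # entries built by _create_extra_data()
#        "overwrite": {"dut": {"package": "ttfw_idf", "class": "ESP32DUT"}}}]}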
class UnitTestAssignTest(CIAssignTest.AssignTest):
CI_TEST_JOB_PATTERN = re.compile(r"^UT_.+")
def __init__(self, test_case_path, ci_config_file):
CIAssignTest.AssignTest.__init__(self, test_case_path, ci_config_file, case_group=Group)
def _search_cases(self, test_case_path, case_filter=None):
"""
For unit tests, we don't search for test functions.
The unit test cases are stored in a YAML file which is created by the build-idf-test CI job.
"""
try:
with open(test_case_path, "r") as f:
raw_data = yaml.load(f, Loader=Loader)
test_cases = raw_data["test cases"]
for case in test_cases:
case["tags"] = set(case["tags"])
except IOError:
print("Test case path is invalid. Should only happen when use @bot to skip unit test.")
test_cases = []
# filter keys are lower case. Map the lower-case keys to the original keys.
try:
key_mapping = {x.lower(): x for x in test_cases[0].keys()}
except IndexError:
key_mapping = dict()
if case_filter:
for key in case_filter:
filtered_cases = []
for case in test_cases:
try:
mapped_key = key_mapping[key]
# bot converts string to lower case
if isinstance(case[mapped_key], str):
_value = case[mapped_key].lower()
else:
_value = case[mapped_key]
if _value in case_filter[key]:
filtered_cases.append(case)
except KeyError:
# case doesn't have this key; regard it as passing the filter
filtered_cases.append(case)
test_cases = filtered_cases
# sort cases by config and test function, so that in a later stage cases with similar
# attributes are more likely to be assigned to the same job,
# which reduces the number of DUT flash operations
test_cases.sort(key=lambda x: x["config"] + x["multi_stage"] + x["multi_device"])
return test_cases
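# Illustration only (field values are made up): the YAML produced by build-idf-test,
# once loaded, is expected to look roughly like:
#   {"test cases": [
#       {"summary": "UART can do select()", "config": "default", "test environment": "UT_T1_1",
#        "tags": ["ESP32_IDF"], "multi_device": "No", "multi_stage": "Yes",
#        "chip_target": "esp32", "reset": "", "timeout": 30, "child case num": 2}]}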
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("test_case",
help="test case folder or file")
parser.add_argument("ci_config_file",
help="gitlab ci config file")
parser.add_argument("output_path",
help="output path of config files")
args = parser.parse_args()
assign_test = UnitTestAssignTest(args.test_case, args.ci_config_file)
assign_test.assign_cases()
assign_test.output_configs(args.output_path)


@@ -0,0 +1,273 @@
# Copyright 2015-2017 Espressif Systems (Shanghai) PTE LTD
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" IDF Test Applications """
import subprocess
import os
import json
from tiny_test_fw import App
class IDFApp(App.BaseApp):
"""
Implements common esp-idf application behavior.
IDF applications should inherit from this class and override the method get_binary_path.
"""
IDF_DOWNLOAD_CONFIG_FILE = "download.config"
IDF_FLASH_ARGS_FILE = "flasher_args.json"
def __init__(self, app_path, config_name=None, target=None):
super(IDFApp, self).__init__(app_path)
self.config_name = config_name
self.target = target
self.idf_path = self.get_sdk_path()
self.binary_path = self.get_binary_path(app_path, config_name)
self.elf_file = self._get_elf_file_path(self.binary_path)
assert os.path.exists(self.binary_path)
sdkconfig_dict = self.get_sdkconfig()
if "CONFIG_APP_BUILD_GENERATE_BINARIES" in sdkconfig_dict:
# There are no flashing targets available when no binaries were generated.
if self.IDF_DOWNLOAD_CONFIG_FILE not in os.listdir(self.binary_path):
if self.IDF_FLASH_ARGS_FILE not in os.listdir(self.binary_path):
msg = ("Neither {} nor {} exists. "
"Try to run 'make print_flash_cmd | tail -n 1 > {}/{}' "
"or 'idf.py build' "
"for resolving the issue."
"").format(self.IDF_DOWNLOAD_CONFIG_FILE, self.IDF_FLASH_ARGS_FILE,
self.binary_path, self.IDF_DOWNLOAD_CONFIG_FILE)
raise AssertionError(msg)
self.flash_files, self.flash_settings = self._parse_flash_download_config()
self.partition_table = self._parse_partition_table()
@classmethod
def get_sdk_path(cls):
idf_path = os.getenv("IDF_PATH")
assert idf_path
assert os.path.exists(idf_path)
return idf_path
def _get_sdkconfig_paths(self):
"""
Returns a list of possible paths where sdkconfig could be found.
Note: can be overridden by a derived class to provide other locations or another order.
"""
return [os.path.join(self.binary_path, "sdkconfig"), os.path.join(self.binary_path, "..", "sdkconfig")]
def get_sdkconfig(self):
"""
reads sdkconfig and returns a dictionary with all configured variables
:raise: AssertionError: if no sdkconfig file is found in any of the defined paths
"""
d = {}
sdkconfig_file = None
for i in self._get_sdkconfig_paths():
if os.path.exists(i):
sdkconfig_file = i
break
assert sdkconfig_file is not None
with open(sdkconfig_file) as f:
for line in f:
configs = line.split('=')
if len(configs) == 2:
d[configs[0]] = configs[1].rstrip()
return d
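# Illustration (not part of this change) of the parsing rule above: a sdkconfig line such as
#   CONFIG_SECURE_FLASH_ENCRYPTION_MODE_DEVELOPMENT=y
# ends up as {"CONFIG_SECURE_FLASH_ENCRYPTION_MODE_DEVELOPMENT": "y"}; values keep their
# literal form (including quotes), so callers typically only test for key presence.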
def get_binary_path(self, app_path, config_name=None):
"""
get binary path according to input app_path.
Subclasses must override this method.
:param app_path: path of application
:param config_name: name of the application build config
:return: abs app binary path
"""
pass
@staticmethod
def _get_elf_file_path(binary_path):
ret = ""
file_names = os.listdir(binary_path)
for fn in file_names:
if os.path.splitext(fn)[1] == ".elf":
ret = os.path.join(binary_path, fn)
return ret
def _parse_flash_download_config(self):
"""
Parse flash download config from build metadata files
Sets self.flash_files, self.flash_settings
(Called from constructor)
Returns (flash_files, flash_settings)
"""
if self.IDF_FLASH_ARGS_FILE in os.listdir(self.binary_path):
# CMake version using build metadata file
with open(os.path.join(self.binary_path, self.IDF_FLASH_ARGS_FILE), "r") as f:
args = json.load(f)
flash_files = [(offs,file) for (offs,file) in args["flash_files"].items() if offs != ""]
flash_settings = args["flash_settings"]
else:
# GNU Make version uses download.config arguments file
with open(os.path.join(self.binary_path, self.IDF_DOWNLOAD_CONFIG_FILE), "r") as f:
args = f.readlines()[-1].split(" ")
flash_files = []
flash_settings = {}
for idx in range(0, len(args), 2): # process arguments in pairs
if args[idx].startswith("--"):
# strip the -- from the command line argument
flash_settings[args[idx][2:]] = args[idx + 1]
else:
# offs, filename
flash_files.append((args[idx], args[idx + 1]))
# The build metadata file does not currently specify which files should be encrypted and which should not.
# Assume that all files should be encrypted if flash encryption is enabled in development mode.
sdkconfig_dict = self.get_sdkconfig()
flash_settings["encrypt"] = "CONFIG_SECURE_FLASH_ENCRYPTION_MODE_DEVELOPMENT" in sdkconfig_dict
# make file offsets into integers, make paths absolute
flash_files = [(int(offs, 0), os.path.join(self.binary_path, path.strip())) for (offs, path) in flash_files]
return (flash_files, flash_settings)
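# Illustration only (file names and offsets are made up): a trimmed flasher_args.json like
#   {"flash_settings": {"flash_mode": "dio", "flash_size": "2MB", "flash_freq": "40m"},
#    "flash_files": {"0x1000": "bootloader/bootloader.bin", "0x10000": "app.bin"}}
# would yield flash_files = [(0x1000, "<binary_path>/bootloader/bootloader.bin"),
#                            (0x10000, "<binary_path>/app.bin")]
# and flash_settings = {"flash_mode": "dio", "flash_size": "2MB", "flash_freq": "40m",
#                       "encrypt": False} when flash encryption is not enabled in sdkconfig.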
def _parse_partition_table(self):
"""
Parse partition table contents based on app binaries
Returns partition_table data
(Called from constructor)
"""
partition_tool = os.path.join(self.idf_path,
"components",
"partition_table",
"gen_esp32part.py")
assert os.path.exists(partition_tool)
for (_, path) in self.flash_files:
if "partition" in path:
partition_file = os.path.join(self.binary_path, path)
break
else:
raise ValueError("No partition table found for IDF binary path: {}".format(self.binary_path))
process = subprocess.Popen(["python", partition_tool, partition_file],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
raw_data = process.stdout.read()
if isinstance(raw_data, bytes):
raw_data = raw_data.decode()
partition_table = dict()
for line in raw_data.splitlines():
if line[0] != "#":
try:
_name, _type, _subtype, _offset, _size, _flags = line.split(",")
if _size[-1] == "K":
_size = int(_size[:-1]) * 1024
elif _size[-1] == "M":
_size = int(_size[:-1]) * 1024 * 1024
else:
_size = int(_size)
except ValueError:
continue
partition_table[_name] = {
"type": _type,
"subtype": _subtype,
"offset": _offset,
"size": _size,
"flags": _flags
}
return partition_table
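# Illustration (not part of this change): a gen_esp32part.py output line such as
#   nvs,data,nvs,0x9000,24K,
# becomes partition_table["nvs"] == {"type": "data", "subtype": "nvs", "offset": "0x9000",
#                                    "size": 24576, "flags": ""};
# note the offset stays a string here and is converted later with int(offset, 0).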
class Example(IDFApp):
def _get_sdkconfig_paths(self):
"""
overrides the parent method to provide exact path of sdkconfig for example tests
"""
return [os.path.join(self.binary_path, "..", "sdkconfig")]
def get_binary_path(self, app_path, config_name=None):
# build folder of example path
path = os.path.join(self.idf_path, app_path, "build")
if os.path.exists(path):
return path
if not config_name:
config_name = "default"
# Search for CI build folders.
# Path format: $IDF_PATH/build_examples/app_path_with_underscores/config/target
# (see tools/ci/build_examples_cmake.sh)
# For example: $IDF_PATH/build_examples/examples_get-started_blink/default/esp32
app_path_underscored = app_path.replace(os.path.sep, "_")
example_path = os.path.join(self.idf_path, "build_examples")
for dirpath in os.listdir(example_path):
if os.path.basename(dirpath) == app_path_underscored:
path = os.path.join(example_path, dirpath, config_name, self.target, "build")
return path
raise OSError("Failed to find example binary")
class UT(IDFApp):
def get_binary_path(self, app_path, config_name=None):
"""
:param app_path: app path
:param config_name: config name
:return: binary path
"""
if not config_name:
config_name = "default"
path = os.path.join(self.idf_path, app_path)
default_build_path = os.path.join(path, "build")
if os.path.exists(default_build_path):
return path
# first try to get from build folder of unit-test-app
path = os.path.join(self.idf_path, "tools", "unit-test-app", "build")
if os.path.exists(path):
# found, use bin in build path
return path
# ``make ut-build-all-configs`` or ``make ut-build-CONFIG`` will copy binary to output folder
path = os.path.join(self.idf_path, "tools", "unit-test-app", "output", config_name)
if os.path.exists(path):
return path
raise OSError("Failed to get unit-test-app binary path")
class SSC(IDFApp):
def get_binary_path(self, app_path, config_name=None):
# TODO: implement SSC get_binary_path
return app_path
class AT(IDFApp):
def get_binary_path(self, app_path, config_name=None):
# TODO: implement AT get_binary_path
return app_path


@@ -0,0 +1,445 @@
# Copyright 2015-2017 Espressif Systems (Shanghai) PTE LTD
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" DUT for IDF applications """
import os
import os.path
import sys
import re
import functools
import tempfile
import subprocess
# python2 and python3 queue package name is different
try:
import Queue as _queue
except ImportError:
import queue as _queue
from serial.tools import list_ports
from tiny_test_fw import DUT, Utility
try:
import esptool
except ImportError: # cheat and use IDF's copy of esptool if available
idf_path = os.getenv("IDF_PATH")
if not idf_path or not os.path.exists(idf_path):
raise
sys.path.insert(0, os.path.join(idf_path, "components", "esptool_py", "esptool"))
import esptool
class IDFToolError(OSError):
pass
class IDFDUTException(RuntimeError):
pass
class IDFRecvThread(DUT.RecvThread):
PERFORMANCE_PATTERN = re.compile(r"\[Performance]\[(\w+)]: ([^\r\n]+)\r?\n")
EXCEPTION_PATTERNS = [
re.compile(r"(Guru Meditation Error: Core\s+\d panic'ed \([\w].*?\))"),
re.compile(r"(abort\(\) was called at PC 0x[a-fA-F\d]{8} on core \d)"),
re.compile(r"(rst 0x\d+ \(TG\dWDT_SYS_RESET|TGWDT_CPU_RESET\))")
]
BACKTRACE_PATTERN = re.compile(r"Backtrace:((\s(0x[0-9a-f]{8}):0x[0-9a-f]{8})+)")
BACKTRACE_ADDRESS_PATTERN = re.compile(r"(0x[0-9a-f]{8}):0x[0-9a-f]{8}")
def __init__(self, read, dut):
super(IDFRecvThread, self).__init__(read, dut)
self.exceptions = _queue.Queue()
self.performance_items = _queue.Queue()
def collect_performance(self, comp_data):
matches = self.PERFORMANCE_PATTERN.findall(comp_data)
for match in matches:
Utility.console_log("[Performance][{}]: {}".format(match[0], match[1]),
color="orange")
self.performance_items.put((match[0], match[1]))
def detect_exception(self, comp_data):
for pattern in self.EXCEPTION_PATTERNS:
start = 0
while True:
match = pattern.search(comp_data, pos=start)
if match:
start = match.end()
self.exceptions.put(match.group(0))
Utility.console_log("[Exception]: {}".format(match.group(0)), color="red")
else:
break
def detect_backtrace(self, comp_data):
start = 0
while True:
match = self.BACKTRACE_PATTERN.search(comp_data, pos=start)
if match:
start = match.end()
Utility.console_log("[Backtrace]:{}".format(match.group(1)), color="red")
# translate backtrace
addresses = self.BACKTRACE_ADDRESS_PATTERN.findall(match.group(1))
translated_backtrace = ""
for addr in addresses:
ret = self.dut.lookup_pc_address(addr)
if ret:
translated_backtrace += ret + "\n"
if translated_backtrace:
Utility.console_log("Translated backtrace\n:" + translated_backtrace, color="yellow")
else:
Utility.console_log("Failed to translate backtrace", color="yellow")
else:
break
CHECK_FUNCTIONS = [collect_performance, detect_exception, detect_backtrace]
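# Illustration (representative text, not from real logs): console lines these check functions are meant to catch:
#   [Performance][wifi_connect]: 3450                          -> collect_performance()
#   Guru Meditation Error: Core  0 panic'ed (LoadProhibited)   -> detect_exception()
#   Backtrace: 0x400d2f10:0x3ffb4f70 0x400d3a55:0x3ffb4f90     -> detect_backtrace()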
def _uses_esptool(func):
""" Suspend listener thread, connect with esptool,
call target function with esptool instance,
then resume listening for output
"""
@functools.wraps(func)
def handler(self, *args, **kwargs):
self.stop_receive()
settings = self.port_inst.get_settings()
try:
if not self._rom_inst:
self._rom_inst = esptool.ESPLoader.detect_chip(self.port_inst)
self._rom_inst.connect('hard_reset')
esp = self._rom_inst.run_stub()
ret = func(self, esp, *args, **kwargs)
# do hard reset after use esptool
esp.hard_reset()
finally:
# always need to restore port settings
self.port_inst.apply_settings(settings)
self.start_receive()
return ret
return handler
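# Illustration (hypothetical helper, not in this change) of how the decorator is used:
# the wrapped method declares an extra `esp` parameter which the decorator supplies,
# and callers simply omit it:
#   @_uses_esptool
#   def read_mac_via_stub(self, esp):
#       return esp.read_mac()
#   # ... later: dut.read_mac_via_stub()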
class IDFDUT(DUT.SerialDUT):
""" IDF DUT, extends serial with esptool methods
(Becomes aware of IDFApp instance which holds app-specific data)
"""
# /dev/ttyAMA0 is a port listed on Raspberry Pi
# /dev/tty.Bluetooth-Incoming-Port is a port listed on macOS
INVALID_PORT_PATTERN = re.compile(r"AMA|Bluetooth")
# whether to erase the NVS partition in start_app()
ERASE_NVS = True
RECV_THREAD_CLS = IDFRecvThread
def __init__(self, name, port, log_file, app, allow_dut_exception=False, **kwargs):
super(IDFDUT, self).__init__(name, port, log_file, app, **kwargs)
self.allow_dut_exception = allow_dut_exception
self.exceptions = _queue.Queue()
self.performance_items = _queue.Queue()
self._rom_inst = None
@classmethod
def _get_rom(cls):
raise NotImplementedError("This is an abstraction class, method not defined.")
@classmethod
def get_mac(cls, app, port):
"""
get MAC address via esptool
:param app: application instance (to get tool)
:param port: serial port as string
:return: MAC address or None
"""
esp = None
try:
esp = cls._get_rom()(port)
esp.connect()
return esp.read_mac()
except RuntimeError:
return None
finally:
if esp:
# do hard reset after use esptool
esp.hard_reset()
esp._port.close()
@classmethod
def confirm_dut(cls, port, **kwargs):
inst = None
try:
expected_rom_class = cls._get_rom()
except NotImplementedError:
expected_rom_class = None
try:
# TODO: check whether 8266 works with this logic
# Otherwise overwrite it in ESP8266DUT
inst = esptool.ESPLoader.detect_chip(port)
if expected_rom_class and type(inst) != expected_rom_class:
raise RuntimeError("Target not expected")
return inst.read_mac() is not None, get_target_by_rom_class(type(inst))
except(esptool.FatalError, RuntimeError):
return False, None
finally:
if inst is not None:
inst._port.close()
@_uses_esptool
def _try_flash(self, esp, erase_nvs, baud_rate):
"""
Called by start_app() to try flashing at a particular baud rate.
Structured this way so @_uses_esptool will reconnect each time
"""
try:
# note: opening here prevents us from having to seek back to 0 each time
flash_files = [(offs, open(path, "rb")) for (offs, path) in self.app.flash_files]
if erase_nvs:
address = self.app.partition_table["nvs"]["offset"]
size = self.app.partition_table["nvs"]["size"]
nvs_file = tempfile.TemporaryFile()
nvs_file.write(b'\xff' * size)
nvs_file.seek(0)
flash_files.append((int(address, 0), nvs_file))
# fake flasher args object, this is a hack until
# esptool Python API is improved
class FlashArgs(object):
def __init__(self, attributes):
for key, value in attributes.items():
self.__setattr__(key, value)
flash_args = FlashArgs({
'flash_size': self.app.flash_settings["flash_size"],
'flash_mode': self.app.flash_settings["flash_mode"],
'flash_freq': self.app.flash_settings["flash_freq"],
'addr_filename': flash_files,
'no_stub': False,
'compress': True,
'verify': False,
'encrypt': self.app.flash_settings.get("encrypt", False),
'erase_all': False,
})
esp.change_baud(baud_rate)
esptool.detect_flash_size(esp, flash_args)
esptool.write_flash(esp, flash_args)
finally:
for (_, f) in flash_files:
f.close()
def start_app(self, erase_nvs=ERASE_NVS):
"""
download and start the app.
:param erase_nvs: whether to erase the NVS partition during flashing
:return: None
"""
for baud_rate in [921600, 115200]:
try:
self._try_flash(erase_nvs, baud_rate)
break
except RuntimeError:
continue
else:
raise IDFToolError()
@_uses_esptool
def reset(self, esp):
"""
hard reset DUT
:return: None
"""
# the decorator `_uses_esptool` performs a hard reset,
# so we don't need to do anything in this method
pass
@_uses_esptool
def erase_partition(self, esp, partition):
"""
:param partition: partition name to erase
:return: None
"""
raise NotImplementedError() # TODO: implement this
# address = self.app.partition_table[partition]["offset"]
size = self.app.partition_table[partition]["size"]
# TODO can use esp.erase_region() instead of this, I think
with open(".erase_partition.tmp", "wb") as f:
f.write(b'\xff' * size)  # must be bytes since the file is opened in binary mode
@_uses_esptool
def dump_flush(self, esp, output_file, **kwargs):
"""
dump flash contents to a file
:param output_file: output file name; if a relative path is given, the app log folder is used as the base path.
:keyword partition: partition name; dump that partition.
``partition`` is preferred over ``address`` and ``size``.
:keyword address: dump from address (need to be used with size)
:keyword size: dump size (need to be used with address)
:return: None
"""
if os.path.isabs(output_file) is False:
output_file = os.path.relpath(output_file, self.app.get_log_folder())
if "partition" in kwargs:
partition = self.app.partition_table[kwargs["partition"]]
_address = partition["offset"]
_size = partition["size"]
elif "address" in kwargs and "size" in kwargs:
_address = kwargs["address"]
_size = kwargs["size"]
else:
raise IDFToolError("You must specify 'partition' or ('address' and 'size') to dump flash")
content = esp.read_flash(_address, _size)
with open(output_file, "wb") as f:
f.write(content)
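# Illustration (not part of this change; file name and numbers are placeholders):
#   dut.dump_flush("/tmp/flash_dump.bin", address=0x9000, size=0x6000)
# reads 0x6000 bytes starting at flash offset 0x9000 and writes them to the output file;
# alternatively a partition name can be passed via the ``partition`` keyword.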
@classmethod
def list_available_ports(cls):
ports = [x.device for x in list_ports.comports()]
espport = os.getenv('ESPPORT')
if not espport:
# It's a little hard to filter out invalid ports with `serial.tools.list_ports.grep()`:
# The check condition in `grep` is: `if r.search(port) or r.search(desc) or r.search(hwid)`.
# This means all three conditions need to fail to filter out a port,
# so some of the filters would not be straightforward to users,
# and a negative regular expression (`^((?!aa|bb|cc).)*$`) is not easy to understand.
# Filtering out invalid ports ourselves is much simpler.
return [x for x in ports if not cls.INVALID_PORT_PATTERN.search(x)]
# On macOS with Python 3.6 the type of espport is already str
if isinstance(espport, type(u'')):
port_hint = espport
else:
port_hint = espport.decode('utf8')
# If $ESPPORT is a valid port, make it appear first in the list
if port_hint in ports:
ports.remove(port_hint)
return [port_hint] + ports
# On macOS, user may set ESPPORT to /dev/tty.xxx while
# pySerial lists only the corresponding /dev/cu.xxx port
if sys.platform == 'darwin' and 'tty.' in port_hint:
port_hint = port_hint.replace('tty.', 'cu.')
if port_hint in ports:
ports.remove(port_hint)
return [port_hint] + ports
return ports
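# Illustration (not part of this change): with ports ["/dev/ttyUSB0", "/dev/ttyAMA0"] and no
# ESPPORT set, the INVALID_PORT_PATTERN filter drops "/dev/ttyAMA0"; with ESPPORT=/dev/ttyUSB1
# and ports ["/dev/ttyUSB0", "/dev/ttyUSB1"], the hint is moved to the front:
# ["/dev/ttyUSB1", "/dev/ttyUSB0"].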
def lookup_pc_address(self, pc_addr):
cmd = ["%saddr2line" % self.TOOLCHAIN_PREFIX,
"-pfiaC", "-e", self.app.elf_file, pc_addr]
ret = ""
try:
translation = subprocess.check_output(cmd)
ret = translation.decode()
except OSError:
pass
return ret
@staticmethod
def _queue_read_all(source_queue):
output = []
while True:
try:
output.append(source_queue.get(timeout=0))
except _queue.Empty:
break
return output
def _queue_copy(self, source_queue, dest_queue):
data = self._queue_read_all(source_queue)
for d in data:
dest_queue.put(d)
def _get_from_queue(self, queue_name):
self_queue = getattr(self, queue_name)
if self.receive_thread:
recv_thread_queue = getattr(self.receive_thread, queue_name)
self._queue_copy(recv_thread_queue, self_queue)
return self._queue_read_all(self_queue)
def stop_receive(self):
if self.receive_thread:
for name in ["performance_items", "exceptions"]:
source_queue = getattr(self.receive_thread, name)
dest_queue = getattr(self, name)
self._queue_copy(source_queue, dest_queue)
super(IDFDUT, self).stop_receive()
def get_exceptions(self):
""" Get exceptions detected by DUT receive thread. """
return self._get_from_queue("exceptions")
def get_performance_items(self):
"""
The DUT receive thread automatically collects performance results matching the pattern ``[Performance][name]: value\n``.
This method is used to get all performance results.
:return: a list of performance items.
"""
return self._get_from_queue("performance_items")
def close(self):
super(IDFDUT, self).close()
if not self.allow_dut_exception and self.get_exceptions():
Utility.console_log("DUT exception detected on {}".format(self), color="red")
raise IDFDUTException()
class ESP32DUT(IDFDUT):
TARGET = "esp32"
TOOLCHAIN_PREFIX = "xtensa-esp32-elf-"
@classmethod
def _get_rom(cls):
return esptool.ESP32ROM
class ESP32S2DUT(IDFDUT):
TARGET = "esp32s2beta"
TOOLCHAIN_PREFIX = "xtensa-esp32s2-elf-"
@classmethod
def _get_rom(cls):
return esptool.ESP32S2ROM
class ESP8266DUT(IDFDUT):
TARGET = "esp8266"
TOOLCHAIN_PREFIX = "xtensa-lx106-elf-"
@classmethod
def _get_rom(cls):
return esptool.ESP8266ROM
def get_target_by_rom_class(cls):
for c in [ESP32DUT, ESP32S2DUT, ESP8266DUT]:
if c._get_rom() == cls:
return c.TARGET
return None


@@ -0,0 +1,132 @@
# Copyright 2015-2017 Espressif Systems (Shanghai) PTE LTD
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import re
from tiny_test_fw import TinyFW, Utility
from IDFApp import IDFApp, Example, UT
from IDFDUT import IDFDUT, ESP32DUT, ESP32S2DUT, ESP8266DUT
def format_case_id(chip, case_name):
return "{}.{}".format(chip, case_name)
def idf_example_test(app=Example, dut=IDFDUT, chip="ESP32", module="examples", execution_time=1,
level="example", erase_nvs=True, config_name=None, **kwargs):
"""
decorator for testing idf examples (with default values for some keyword args).
:param app: test application class
:param dut: dut class
:param chip: chip supported, string or tuple
:param module: module, string
:param execution_time: execution time in minutes, int
:param level: test level, could be used to filter test cases, string
:param erase_nvs: whether to erase the NVS partition in DUT.start_app()
:param config_name: if specified, name of the app configuration
:param kwargs: other keyword args
:return: test method
"""
try:
# try to configure the default NVS-erase behavior
dut.ERASE_NVS = erase_nvs
except AttributeError:
pass
original_method = TinyFW.test_method(app=app, dut=dut, chip=chip, module=module,
execution_time=execution_time, level=level, **kwargs)
def test(func):
test_func = original_method(func)
test_func.case_info["ID"] = format_case_id(chip, test_func.case_info["name"])
return test_func
return test
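# Illustration (not part of this change; example name, env_tag and expected output are placeholders):
#   @ttfw_idf.idf_example_test(env_tag="Example_GENERIC")
#   def test_examples_hello_world(env, extra_data):
#       dut = env.get_dut("hello_world", "examples/get-started/hello_world")
#       dut.start_app()
#       dut.expect("Hello world!")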
def idf_unit_test(app=UT, dut=IDFDUT, chip="ESP32", module="unit-test", execution_time=1,
level="unit", erase_nvs=True, **kwargs):
"""
decorator for testing idf unit tests (with default values for some keyword args).
:param app: test application class
:param dut: dut class
:param chip: chip supported, string or tuple
:param module: module, string
:param execution_time: execution time in minutes, int
:param level: test level, could be used to filter test cases, string
:param erase_nvs: whether to erase the NVS partition in DUT.start_app()
:param kwargs: other keyword args
:return: test method
"""
try:
# try to configure the default NVS-erase behavior
dut.ERASE_NVS = erase_nvs
except AttributeError:
pass
original_method = TinyFW.test_method(app=app, dut=dut, chip=chip, module=module,
execution_time=execution_time, level=level, **kwargs)
def test(func):
test_func = original_method(func)
test_func.case_info["ID"] = format_case_id(chip, test_func.case_info["name"])
return test_func
return test
def log_performance(item, value):
"""
print a performance item to the console in the pre-defined format
:param item: performance item name
:param value: performance value
"""
performance_msg = "[Performance][{}]: {}".format(item, value)
Utility.console_log(performance_msg, "orange")
# update to junit test report
current_junit_case = TinyFW.JunitReport.get_current_test_case()
current_junit_case.stdout += performance_msg + "\r\n"
def check_performance(item, value):
"""
check if an IDF performance item meets the pass standard
:param item: performance item name
:param value: performance item value
:raise: AssertionError: if check fails
"""
ret = True
standard_value = 0
idf_path = IDFApp.get_sdk_path()
performance_file = os.path.join(idf_path, "components", "idf_test", "include", "idf_performance.h")
if os.path.exists(performance_file):
with open(performance_file, "r") as f:
data = f.read()
match = re.search(r"#define\s+IDF_PERFORMANCE_(MIN|MAX)_{}\s+([\d.]+)".format(item.upper()), data)
if match:
op = match.group(1)
standard_value = float(match.group(2))
if op == "MAX":
ret = value <= standard_value
else:
ret = value >= standard_value
if not ret:
raise AssertionError("[Performance] {} value is {}, doesn't meet pass standard {}"
.format(item, value, standard_value))
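# Illustration (the define name and threshold are placeholders): with a line like
#   #define IDF_PERFORMANCE_MAX_ESP_TIMER_LATENCY 500
# in idf_performance.h, check_performance("esp_timer_latency", 450) passes, while any
# value above 500 raises AssertionError.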