tiny-test-fw: fix misc bugs:

1. configs are not functional
    * decorators are executed when searching for cases, so the default config needs to be set before the case search.
2. fix DUT encode/decode errors (see the sketch after this list)
    * python3 serial doesn't support writing str/unicode, data needs to be converted to bytes first.
    * python2 str can fail to encode/decode non-ASCII chars
3. fix bug that log folder is not created
4. fix bug that test suite name is not correctly passed:
    * the keyword arg should be `test_suite_name`, not `test_name`
5. fix bug that the test run stops on a failed case
6. fix bug that DUT `read` doesn't return all cached data
    * `_DataCache.get_data` should first move all data from the queue into the data cache and then return it to the user.
7. fix bug that `expect_all` fails even when all expected items matched
8. improve error info for expect
    * print the pattern of a regular expression when a match fails
9. fix bug that setting the default config doesn't work
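A minimal sketch of the bytes conversion described in item 2, assuming pyserial; the helper name `_encode_data`, the port path, and the command string are illustrative, not taken from the framework:

    import serial

    def _encode_data(data):
        # pyserial on python3 only accepts bytes-like objects,
        # so convert str/unicode to bytes before writing
        if isinstance(data, str):
            data = data.encode("utf-8", "ignore")
        return data

    port = serial.Serial("/dev/ttyUSB0", baudrate=115200, timeout=0.05)
    port.write(_encode_data("make menuconfig\r\n"))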
He Yin Ling
2018-01-16 22:16:03 +08:00
committed by bot
parent 98d1f05ab5
commit 2b068f3ceb
8 changed files with 62 additions and 29 deletions


@@ -85,6 +85,14 @@ def _decode_data(data):
     return data
 
 
+def _pattern_to_string(pattern):
+    try:
+        ret = "RegEx: " + pattern.pattern
+    except AttributeError:
+        ret = pattern
+    return ret
+
+
 class _DataCache(_queue.Queue):
     """
     Data cache based on Queue. Allow users to process data cache based on bytes instead of Queue."
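For illustration, roughly what the new `_pattern_to_string` helper returns for the two supported argument types (the sample patterns below are invented):

    import re

    _pattern_to_string(re.compile(r"rst:0x[0-9a-f]+"))  # -> "RegEx: rst:0x[0-9a-f]+"
    _pattern_to_string("waiting for download")          # -> "waiting for download"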
@@ -94,6 +102,21 @@ class _DataCache(_queue.Queue):
         _queue.Queue.__init__(self, maxsize=maxsize)
         self.data_cache = str()
 
+    def _move_from_queue_to_cache(self):
+        """
+        move all of the available data in the queue to cache
+
+        :return: True if moved any item from queue to data cache, else False
+        """
+        ret = False
+        while True:
+            try:
+                self.data_cache += _decode_data(self.get(0))
+                ret = True
+            except _queue.Empty:
+                break
+        return ret
+
     def get_data(self, timeout=0):
         """
         get a copy of data from cache.
@@ -105,12 +128,16 @@ class _DataCache(_queue.Queue):
         if timeout < 0:
             timeout = 0
-        try:
-            data = self.get(timeout=timeout)
-            self.data_cache += _decode_data(data)
-        except _queue.Empty:
-            # don't do anything when on update for cache
-            pass
+        ret = self._move_from_queue_to_cache()
+
+        if not ret:
+            # we only wait for new data if we can't provide a new data_cache
+            try:
+                data = self.get(timeout=timeout)
+                self.data_cache += _decode_data(data)
+            except _queue.Empty:
+                # don't do anything when on update for cache
+                pass
         return copy.deepcopy(self.data_cache)
 
     def flush(self, index=0xFFFFFFFF):
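A small standalone illustration of the new `get_data` behavior, assuming `_decode_data` converts bytes to str as described in the commit message (the payload values are invented):

    cache = _DataCache()
    cache.put(b"rst:0x1 ")
    cache.put(b"(POWERON_RESET)")
    # the queue already holds data, so this call drains it into the cache
    # immediately instead of blocking on self.get(timeout=...)
    print(cache.get_data(timeout=1))  # -> "rst:0x1 (POWERON_RESET)"
    # nothing new arrived: this call waits up to the timeout, then returns
    # the unchanged cache instead of raising
    print(cache.get_data(timeout=1))  # -> "rst:0x1 (POWERON_RESET)"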
@@ -417,7 +444,7 @@ class BaseDUT(object):
             data = self.data_cache.get_data(time.time() + timeout - start_time)
 
         if ret is None:
-            raise ExpectTimeout(self.name + ": " + str(pattern))
+            raise ExpectTimeout(self.name + ": " + _pattern_to_string(pattern))
         return ret
 
     def _expect_multi(self, expect_all, expect_item_list, timeout):
@@ -457,12 +484,11 @@ class BaseDUT(object):
                     if expect_item["ret"] is not None:
                         # match succeed for one item
                         matched_expect_items.append(expect_item)
-                        break
 
             # if expect all, then all items need to be matched,
             # else only one item need to matched
             if expect_all:
-                match_succeed = (matched_expect_items == expect_items)
+                match_succeed = len(matched_expect_items) == len(expect_items)
             else:
                 match_succeed = True if matched_expect_items else False
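A toy illustration (values invented) of one way the old comparison could evaluate to False even though every item matched: items may match in a different order than they were passed in, and `==` on lists is order-sensitive, while comparing lengths is not:

    expect_items = [{"pattern": "A", "ret": None}, {"pattern": "B", "ret": None}]
    # suppose "B" shows up in the output before "A"
    matched_expect_items = [expect_items[1], expect_items[0]]

    matched_expect_items == expect_items              # False: same items, different order
    len(matched_expect_items) == len(expect_items)    # True: all items matched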
@@ -482,7 +508,7 @@ class BaseDUT(object):
             # flush already matched data
             self.data_cache.flush(slice_index)
         else:
-            raise ExpectTimeout(self.name + ": " + str(expect_items))
+            raise ExpectTimeout(self.name + ": " + str([_pattern_to_string(x) for x in expect_items]))
 
     @_expect_lock
     def expect_any(self, *expect_items, **timeout):
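For context, a hedged sketch of how these expect APIs are typically driven from a test case; `dut` and the log lines are assumed, and only the string / compiled-regex item forms visible in this diff are used:

    import re

    def check_boot_log(dut):
        dut.expect("boot: ESP-IDF", timeout=10)
        # with the fixes above, expect_all only succeeds once every item has
        # matched, and a timeout error now prints "RegEx: ..." for compiled
        # patterns instead of the repr of the pattern object
        dut.expect_all(re.compile(r"heap_init"), "cpu_start", timeout=30)
        dut.expect_any("entering light sleep", re.compile(r"deep sleep"), timeout=30)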