mirror of
https://github.com/espressif/esp-idf.git
synced 2025-08-09 12:35:28 +00:00
ldgen: implement flags support
Implement support for KEEP, ALIGN, emitting symbols and SORT. Add appropriate tests Defines default mapping in linker fragment file
This commit is contained in:
@@ -192,3 +192,8 @@ entries:
|
||||
[scheme:wifi_slp_rx_iram]
|
||||
entries:
|
||||
wifi_slp_rx_iram -> iram0_text
|
||||
|
||||
[mapping:default]
|
||||
archive: *
|
||||
entries:
|
||||
* (default)
|
||||
|
@@ -157,3 +157,8 @@ entries:
|
||||
[scheme:wifi_slp_rx_iram]
|
||||
entries:
|
||||
wifi_slp_rx_iram -> iram0_text
|
||||
|
||||
[mapping:default]
|
||||
archive: *
|
||||
entries:
|
||||
* (default)
|
||||
|
@@ -142,3 +142,8 @@ entries:
|
||||
[scheme:wifi_slp_rx_iram]
|
||||
entries:
|
||||
wifi_slp_rx_iram -> iram0_text
|
||||
|
||||
[mapping:default]
|
||||
archive: *
|
||||
entries:
|
||||
* (default)
|
||||
|
@@ -157,3 +157,8 @@ entries:
|
||||
[scheme:wifi_slp_rx_iram]
|
||||
entries:
|
||||
wifi_slp_rx_iram -> iram0_text
|
||||
|
||||
[mapping:default]
|
||||
archive: *
|
||||
entries:
|
||||
* (default)
|
||||
|
@@ -17,9 +17,12 @@ import abc
|
||||
import os
|
||||
import re
|
||||
from collections import namedtuple
|
||||
from enum import Enum
|
||||
|
||||
from pyparsing import (Combine, Forward, Group, Literal, OneOrMore, Optional, ParseFatalException, Suppress, Word,
|
||||
ZeroOrMore, alphanums, alphas, indentedBlock, originalTextFor, restOfLine)
|
||||
from entity import Entity
|
||||
from pyparsing import (Combine, Forward, Group, Keyword, Literal, OneOrMore, Optional, Or, ParseFatalException,
|
||||
Suppress, Word, ZeroOrMore, alphanums, alphas, delimitedList, indentedBlock, nums,
|
||||
originalTextFor, restOfLine)
|
||||
from sdkconfig import SDKConfig
|
||||
|
||||
KeyGrammar = namedtuple('KeyGrammar', 'grammar min max required')
|
||||
@@ -267,11 +270,131 @@ class Mapping(Fragment):
|
||||
Encapsulates a mapping fragment, which defines what targets the input sections of mappable entties are placed under.
|
||||
"""
|
||||
|
||||
MAPPING_ALL_OBJECTS = '*'
|
||||
class Flag():
|
||||
PRE_POST = (Optional(Suppress(',') + Suppress('pre').setParseAction(lambda: True).setResultsName('pre')) +
|
||||
Optional(Suppress(',') + Suppress('post').setParseAction(lambda: True).setResultsName('post')))
|
||||
|
||||
class Emit(Flag):
|
||||
|
||||
def __init__(self, symbol, pre=True, post=True):
|
||||
self.symbol = symbol
|
||||
self.pre = pre
|
||||
self.post = post
|
||||
|
||||
@staticmethod
|
||||
def get_grammar():
|
||||
# emit(symbol [, pre, post])
|
||||
#
|
||||
# __symbol_start, __symbol_end is generated before and after
|
||||
# the corresponding input section description, respectively.
|
||||
grammar = (Keyword('emit').suppress() +
|
||||
Suppress('(') +
|
||||
Fragment.IDENTIFIER.setResultsName('symbol') +
|
||||
Mapping.Flag.PRE_POST +
|
||||
Suppress(')'))
|
||||
|
||||
def on_parse(tok):
|
||||
if tok.pre == '' and tok.post == '':
|
||||
res = Mapping.Emit(tok.symbol)
|
||||
elif tok.pre != '' and tok.post == '':
|
||||
res = Mapping.Emit(tok.symbol, tok.pre, False)
|
||||
elif tok.pre == '' and tok.post != '':
|
||||
res = Mapping.Emit(tok.symbol, False, tok.post)
|
||||
else:
|
||||
res = Mapping.Emit(tok.symbol, tok.pre, tok.post)
|
||||
return res
|
||||
|
||||
grammar.setParseAction(on_parse)
|
||||
return grammar
|
||||
|
||||
def __eq__(self, other):
|
||||
return (isinstance(other, Mapping.Emit) and
|
||||
self.symbol == other.symbol and
|
||||
self.pre == other.pre and
|
||||
self.post == other.post)
|
||||
|
||||
class Align(Flag):
|
||||
|
||||
def __init__(self, alignment, pre=True, post=False):
|
||||
self.alignment = alignment
|
||||
self.pre = pre
|
||||
self.post = post
|
||||
|
||||
@staticmethod
|
||||
def get_grammar():
|
||||
# align(alignment, [, pre, post])
|
||||
grammar = (Keyword('align').suppress() +
|
||||
Suppress('(') +
|
||||
Word(nums).setResultsName('alignment') +
|
||||
Mapping.Flag.PRE_POST +
|
||||
Suppress(')'))
|
||||
|
||||
def on_parse(tok):
|
||||
alignment = int(tok.alignment)
|
||||
if tok.pre == '' and tok.post == '':
|
||||
res = Mapping.Align(alignment)
|
||||
elif tok.pre != '' and tok.post == '':
|
||||
res = Mapping.Align(alignment, tok.pre)
|
||||
elif tok.pre == '' and tok.post != '':
|
||||
res = Mapping.Align(alignment, False, tok.post)
|
||||
else:
|
||||
res = Mapping.Align(alignment, tok.pre, tok.post)
|
||||
return res
|
||||
|
||||
grammar.setParseAction(on_parse)
|
||||
return grammar
|
||||
|
||||
def __eq__(self, other):
|
||||
return (isinstance(other, Mapping.Align) and
|
||||
self.alignment == other.alignment and
|
||||
self.pre == other.pre and
|
||||
self.post == other.post)
|
||||
|
||||
class Keep(Flag):
|
||||
|
||||
def __init__(self):
|
||||
pass
|
||||
|
||||
@staticmethod
|
||||
def get_grammar():
|
||||
grammar = Keyword('keep').setParseAction(Mapping.Keep)
|
||||
return grammar
|
||||
|
||||
def __eq__(self, other):
|
||||
return isinstance(other, Mapping.Keep)
|
||||
|
||||
class Sort(Flag):
|
||||
class Type(Enum):
|
||||
NAME = 0
|
||||
ALIGNMENT = 1
|
||||
INIT_PRIORITY = 2
|
||||
|
||||
def __init__(self, first, second=None):
|
||||
self.first = first
|
||||
self.second = second
|
||||
|
||||
@staticmethod
|
||||
def get_grammar():
|
||||
# sort(sort_by_first [, sort_by_second])
|
||||
keywords = Keyword('name') | Keyword('alignment') | Keyword('init_priority')
|
||||
grammar = (Keyword('sort').suppress() + Suppress('(') +
|
||||
keywords.setResultsName('first') +
|
||||
Optional(Suppress(',') + keywords.setResultsName('second')) + Suppress(')'))
|
||||
|
||||
grammar.setParseAction(lambda tok: Mapping.Sort(tok.first, tok.second if tok.second != '' else None))
|
||||
return grammar
|
||||
|
||||
def __eq__(self, other):
|
||||
return (isinstance(other, Mapping.Sort) and
|
||||
self.first == other.first and
|
||||
self.second == other.second)
|
||||
|
||||
def __init__(self):
|
||||
Fragment.__init__(self)
|
||||
self.entries = set()
|
||||
# k = (obj, symbol, scheme)
|
||||
# v = list((section, target), Mapping.Flag))
|
||||
self.flags = dict()
|
||||
self.deprecated = False
|
||||
|
||||
def set_key_value(self, key, parse_results):
|
||||
@@ -283,40 +406,63 @@ class Mapping(Fragment):
|
||||
symbol = None
|
||||
scheme = None
|
||||
|
||||
try:
|
||||
obj = result['object']
|
||||
except KeyError:
|
||||
pass
|
||||
obj = result['object']
|
||||
|
||||
try:
|
||||
symbol = result['symbol']
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
try:
|
||||
scheme = result['scheme']
|
||||
except KeyError:
|
||||
pass
|
||||
scheme = result['scheme']
|
||||
|
||||
self.entries.add((obj, symbol, scheme))
|
||||
mapping = (obj, symbol, scheme)
|
||||
self.entries.add(mapping)
|
||||
|
||||
try:
|
||||
parsed_flags = result['sections_target_flags']
|
||||
except KeyError:
|
||||
parsed_flags = []
|
||||
|
||||
if parsed_flags:
|
||||
entry_flags = []
|
||||
for pf in parsed_flags:
|
||||
entry_flags.append((pf.sections, pf.target, list(pf.flags)))
|
||||
|
||||
try:
|
||||
existing_flags = self.flags[mapping]
|
||||
except KeyError:
|
||||
existing_flags = list()
|
||||
self.flags[mapping] = existing_flags
|
||||
|
||||
existing_flags.extend(entry_flags)
|
||||
|
||||
def get_key_grammars(self):
|
||||
# There are three possible patterns for mapping entries:
|
||||
# obj:symbol (scheme)
|
||||
# obj (scheme)
|
||||
# * (scheme)
|
||||
# Flags can be specified for section->target in the scheme specified, ex:
|
||||
# obj (scheme); section->target emit(symbol), section2->target2 align(4)
|
||||
obj = Fragment.ENTITY.setResultsName('object')
|
||||
symbol = Suppress(':') + Fragment.IDENTIFIER.setResultsName('symbol')
|
||||
scheme = Suppress('(') + Fragment.IDENTIFIER.setResultsName('scheme') + Suppress(')')
|
||||
|
||||
pattern1 = obj + symbol + scheme
|
||||
pattern2 = obj + scheme
|
||||
pattern3 = Literal(Mapping.MAPPING_ALL_OBJECTS).setResultsName('object') + scheme
|
||||
# The flags are specified for section->target in the scheme specified
|
||||
sections_target = Scheme.grammars['entries'].grammar
|
||||
|
||||
entry = pattern1 | pattern2 | pattern3
|
||||
flag = Or([f.get_grammar() for f in [Mapping.Keep, Mapping.Align, Mapping.Emit, Mapping.Sort]])
|
||||
|
||||
section_target_flags = Group(sections_target + Group(OneOrMore(flag)).setResultsName('flags'))
|
||||
|
||||
pattern1 = obj + symbol
|
||||
pattern2 = obj
|
||||
pattern3 = Literal(Entity.ALL).setResultsName('object')
|
||||
|
||||
entry = ((pattern1 | pattern2 | pattern3) + scheme +
|
||||
Optional(Suppress(';') + delimitedList(section_target_flags).setResultsName('sections_target_flags')))
|
||||
|
||||
grammars = {
|
||||
'archive': KeyGrammar(Fragment.ENTITY.setResultsName('archive'), 1, 1, True),
|
||||
'archive': KeyGrammar(Or([Fragment.ENTITY, Word(Entity.ALL)]).setResultsName('archive'), 1, 1, True),
|
||||
'entries': KeyGrammar(entry, 0, None, True)
|
||||
}
|
||||
|
||||
@@ -330,7 +476,6 @@ class DeprecatedMapping():
|
||||
|
||||
# Name of the default condition entry
|
||||
DEFAULT_CONDITION = 'default'
|
||||
MAPPING_ALL_OBJECTS = '*'
|
||||
|
||||
@staticmethod
|
||||
def get_fragment_grammar(sdkconfig, fragment_file):
|
||||
@@ -348,7 +493,7 @@ class DeprecatedMapping():
|
||||
|
||||
pattern1 = Group(obj + symbol + scheme)
|
||||
pattern2 = Group(obj + scheme)
|
||||
pattern3 = Group(Literal(Mapping.MAPPING_ALL_OBJECTS).setResultsName('object') + scheme)
|
||||
pattern3 = Group(Literal(Entity.ALL).setResultsName('object') + scheme)
|
||||
|
||||
mapping_entry = pattern1 | pattern2 | pattern3
|
||||
|
||||
|
@@ -22,72 +22,83 @@ from collections import namedtuple
|
||||
from entity import Entity
|
||||
from fragments import Mapping, Scheme, Sections
|
||||
from ldgen_common import LdGenFailure
|
||||
from output_commands import InputSectionDesc
|
||||
from output_commands import AlignAtAddress, InputSectionDesc, SymbolAtAddress
|
||||
|
||||
|
||||
class RuleNode():
|
||||
class Placement():
|
||||
|
||||
class Section():
|
||||
def __init__(self, node, sections, target, flags, explicit, force=False, dryrun=False):
|
||||
self.node = node
|
||||
self.sections = sections
|
||||
self.target = target
|
||||
self.flags = flags
|
||||
|
||||
def __init__(self, target, exclusions, explicit=False):
|
||||
self.target = target
|
||||
self.exclusions = set(exclusions)
|
||||
self.exclusions = set()
|
||||
self.subplacements = set()
|
||||
|
||||
# Indicate whether this node has been created explicitly from a mapping,
|
||||
# or simply just to create a path to the explicitly created node.
|
||||
#
|
||||
# For example,
|
||||
#
|
||||
# lib.a
|
||||
# obj:sym (scheme)
|
||||
#
|
||||
# Nodes for lib.a and obj will be created, but only the node for
|
||||
# sym will have been created explicitly.
|
||||
#
|
||||
# This is used in deciding whether or not an output command should
|
||||
# be emitted for this node, or for exclusion rule generation.
|
||||
self.explicit = explicit
|
||||
# Force this placement to be output
|
||||
self.force = force
|
||||
|
||||
def __init__(self, parent, name, sections):
|
||||
# This placement was created from a mapping
|
||||
# fragment entry.
|
||||
self.explicit = explicit
|
||||
|
||||
# Find basis placement. A basis placement is a placement
|
||||
# on the parent (or parent's parent and so on and so forth)
|
||||
# that operates on the same section as this one.
|
||||
parent = node.parent
|
||||
candidate = None
|
||||
while parent:
|
||||
try:
|
||||
candidate = parent.placements[sections]
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
if candidate and candidate.is_significant():
|
||||
break
|
||||
else:
|
||||
parent = parent.parent
|
||||
|
||||
self.basis = candidate
|
||||
|
||||
if self.is_significant() and not dryrun and self.basis:
|
||||
self.basis.add_exclusion(self)
|
||||
|
||||
def is_significant(self):
|
||||
# Check if the placement is significant. Significant placements
|
||||
# are the end of a basis chain (not self.basis) or a change
|
||||
# in target (self.target != self.basis.target)
|
||||
#
|
||||
# Placement can also be a basis if it has flags
|
||||
# (self.flags) or its basis has flags (self.basis.flags)
|
||||
return (not self.basis or
|
||||
self.target != self.basis.target or
|
||||
(self.flags and not self.basis.flags) or
|
||||
(not self.flags and self.basis.flags) or
|
||||
self.force)
|
||||
|
||||
def force_significant(self):
|
||||
if not self.is_significant():
|
||||
self.force = True
|
||||
if self.basis:
|
||||
self.basis.add_exclusion(self)
|
||||
|
||||
def add_exclusion(self, exclusion):
|
||||
self.exclusions.add(exclusion)
|
||||
|
||||
def add_subplacement(self, subplacement):
|
||||
self.subplacements.add(subplacement)
|
||||
|
||||
|
||||
class EntityNode():
|
||||
|
||||
def __init__(self, parent, name):
|
||||
self.children = []
|
||||
self.parent = parent
|
||||
self.name = name
|
||||
self.child_node = None
|
||||
self.child_t = EntityNode
|
||||
self.entity = None
|
||||
|
||||
self.sections = dict()
|
||||
|
||||
# A node inherits the section -> target entries from
|
||||
# its parent. This is to simplify logic, avoiding
|
||||
# going up the parental chain to try a 'basis' rule
|
||||
# in creating exclusions. This relies on the fact that
|
||||
# the mappings must be inserted from least to most specific.
|
||||
# This sort is done in generate_rules().
|
||||
if sections:
|
||||
for (s, v) in sections.items():
|
||||
self.sections[s] = RuleNode.Section(v.target, [], [])
|
||||
|
||||
def add_exclusion(self, sections, exclusion):
|
||||
self.sections[sections].exclusions.add(exclusion)
|
||||
|
||||
# Recursively create exclusions in parents
|
||||
if self.parent:
|
||||
self.exclude_from_parent(sections)
|
||||
|
||||
def add_sections(self, sections, target):
|
||||
try:
|
||||
_sections = self.sections[sections]
|
||||
if not _sections.explicit:
|
||||
_sections.target = target
|
||||
_sections.explicit = True
|
||||
else:
|
||||
if target != _sections.target:
|
||||
raise GenerationException('Sections mapped to multiple targets')
|
||||
except KeyError:
|
||||
self.sections[sections] = RuleNode.Section(target, [], True)
|
||||
|
||||
def exclude_from_parent(self, sections):
|
||||
self.parent.add_exclusion(sections, self.entity)
|
||||
self.placements = dict()
|
||||
|
||||
def add_child(self, entity):
|
||||
child_specificity = self.entity.specificity.value + 1
|
||||
@@ -99,7 +110,7 @@ class RuleNode():
|
||||
assert(len(child) <= 1)
|
||||
|
||||
if not child:
|
||||
child = self.child_node(self, name, self.sections)
|
||||
child = self.child_t(self, name)
|
||||
self.children.append(child)
|
||||
else:
|
||||
child = child[0]
|
||||
@@ -125,151 +136,176 @@ class RuleNode():
|
||||
|
||||
return commands
|
||||
|
||||
def add_node_child(self, entity, sections, target, sections_db):
|
||||
def get_node_output_commands(self):
|
||||
commands = collections.defaultdict(list)
|
||||
|
||||
for sections in self.get_output_sections():
|
||||
placement = self.placements[sections]
|
||||
if placement.is_significant():
|
||||
assert(placement.node == self)
|
||||
|
||||
keep = False
|
||||
sort = None
|
||||
surround = []
|
||||
|
||||
placement_flags = placement.flags if placement.flags is not None else []
|
||||
|
||||
for flag in placement_flags:
|
||||
if isinstance(flag, Mapping.Keep):
|
||||
keep = True
|
||||
elif isinstance(flag, Mapping.Sort):
|
||||
sort = (flag.first, flag.second)
|
||||
else: # emit or align
|
||||
surround.append(flag)
|
||||
|
||||
for flag in surround:
|
||||
if flag.pre:
|
||||
if isinstance(flag, Mapping.Emit):
|
||||
commands[placement.target].append(SymbolAtAddress('_%s_start' % flag.symbol))
|
||||
else: # align
|
||||
commands[placement.target].append(AlignAtAddress(flag.alignment))
|
||||
|
||||
# This is for expanded object node and symbol node placements without checking for
|
||||
# the type.
|
||||
placement_sections = frozenset(placement.sections)
|
||||
command_sections = sections if sections == placement_sections else placement_sections
|
||||
|
||||
command = InputSectionDesc(placement.node.entity, command_sections, [e.node.entity for e in placement.exclusions], keep, sort)
|
||||
commands[placement.target].append(command)
|
||||
|
||||
# Generate commands for intermediate, non-explicit exclusion placements here, so that they can be enclosed by
|
||||
# flags that affect the parent placement.
|
||||
for subplacement in placement.subplacements:
|
||||
if not subplacement.flags and not subplacement.explicit:
|
||||
command = InputSectionDesc(subplacement.node.entity, subplacement.sections,
|
||||
[e.node.entity for e in subplacement.exclusions], keep, sort)
|
||||
commands[placement.target].append(command)
|
||||
|
||||
for flag in surround:
|
||||
if flag.post:
|
||||
if isinstance(flag, Mapping.Emit):
|
||||
commands[placement.target].append(SymbolAtAddress('_%s_end' % flag.symbol))
|
||||
else: # align
|
||||
commands[placement.target].append(AlignAtAddress(flag.alignment))
|
||||
|
||||
return commands
|
||||
|
||||
def self_placement(self, sections, target, flags, explicit=True, force=False):
|
||||
placement = Placement(self, sections, target, flags, explicit, force)
|
||||
self.placements[sections] = placement
|
||||
return placement
|
||||
|
||||
def child_placement(self, entity, sections, target, flags, sections_db):
|
||||
child = self.add_child(entity)
|
||||
child.insert(entity, sections, target, sections_db)
|
||||
child.insert(entity, sections, target, flags, sections_db)
|
||||
|
||||
def get_node_output_commands(self):
|
||||
commands = collections.defaultdict(list)
|
||||
|
||||
for sections in self.get_section_keys():
|
||||
info = self.sections[sections]
|
||||
if info.exclusions or info.explicit:
|
||||
command = InputSectionDesc(self.entity, sections, info.exclusions)
|
||||
commands[info.target].append(command)
|
||||
|
||||
return commands
|
||||
|
||||
def insert(self, entity, sections, target, sections_db):
|
||||
def insert(self, entity, sections, target, flags, sections_db):
|
||||
if self.entity.specificity == entity.specificity:
|
||||
if self.parent.sections[sections].target != target:
|
||||
self.add_sections(sections, target)
|
||||
self.exclude_from_parent(sections)
|
||||
# Since specificities match, create the placement in this node.
|
||||
self.self_placement(sections, target, flags)
|
||||
else:
|
||||
self.add_node_child(entity, sections, target, sections_db)
|
||||
# If not, create a child node and try to create the placement there.
|
||||
self.child_placement(entity, sections, target, flags, sections_db)
|
||||
|
||||
def get_section_keys(self):
|
||||
return sorted(self.sections.keys(), key=' '.join)
|
||||
def get_output_sections(self):
|
||||
return sorted(self.placements.keys(), key=' '.join)
|
||||
|
||||
|
||||
class SymbolNode(RuleNode):
|
||||
class SymbolNode(EntityNode):
|
||||
|
||||
def __init__(self, parent, name, sections):
|
||||
RuleNode.__init__(self, parent, name, sections)
|
||||
self.entity = Entity(self.parent.parent.name, self.parent.name, self.name)
|
||||
|
||||
def insert(self, entity, sections, target, sections_db):
|
||||
self.add_sections(sections, target)
|
||||
|
||||
def get_node_output_commands(self):
|
||||
commands = collections.defaultdict(list)
|
||||
|
||||
for sections in self.get_section_keys():
|
||||
info = self.sections[sections]
|
||||
if info.explicit:
|
||||
command = InputSectionDesc(Entity(self.parent.parent.name, self.parent.name), sections, [])
|
||||
commands[info.target].append(command)
|
||||
|
||||
return commands
|
||||
def __init__(self, parent, name):
|
||||
EntityNode.__init__(self, parent, name)
|
||||
self.entity = Entity(self.parent.parent.name, self.parent.name)
|
||||
|
||||
|
||||
class ObjectNode(RuleNode):
|
||||
class ObjectNode(EntityNode):
|
||||
|
||||
def __init__(self, parent, name, sections):
|
||||
RuleNode.__init__(self, parent, name, sections)
|
||||
self.child_node = SymbolNode
|
||||
self.expanded_sections = dict()
|
||||
def __init__(self, parent, name):
|
||||
EntityNode.__init__(self, parent, name)
|
||||
self.child_t = SymbolNode
|
||||
self.entity = Entity(self.parent.name, self.name)
|
||||
self.subplacements = list()
|
||||
|
||||
def add_node_child(self, entity, sections, target, sections_db):
|
||||
if self.sections[sections].target != target:
|
||||
symbol = entity.symbol
|
||||
match_sections = None
|
||||
|
||||
obj_sections = sections_db.get_sections(self.parent.name, self.name)
|
||||
def child_placement(self, entity, sections, target, flags, sections_db):
|
||||
child = self.add_child(entity)
|
||||
sym_placement = Placement(child, sections, target, flags, True, dryrun=True)
|
||||
|
||||
# The basis placement for sym_placement can either be
|
||||
# an existing placement on this node, or nonexistent.
|
||||
if sym_placement.is_significant():
|
||||
try:
|
||||
match_sections = self.expanded_sections[sections]
|
||||
obj_sections = self.placements[sections].sections
|
||||
except KeyError:
|
||||
match_sections = []
|
||||
obj_sections = None
|
||||
|
||||
if not obj_sections or obj_sections == sections:
|
||||
# Expand this section for the first time
|
||||
found_sections = sections_db.get_sections(self.parent.name, self.name)
|
||||
obj_sections = []
|
||||
for s in sections:
|
||||
match_sections.extend(fnmatch.filter(obj_sections, s))
|
||||
obj_sections.extend(fnmatch.filter(found_sections, s))
|
||||
|
||||
if match_sections:
|
||||
if obj_sections:
|
||||
symbol = entity.symbol
|
||||
remove_sections = [s.replace('.*', '.%s' % symbol) for s in sections if '.*' in s]
|
||||
filtered_sections = [s for s in match_sections if s not in remove_sections]
|
||||
filtered_sections = [s for s in obj_sections if s not in remove_sections]
|
||||
|
||||
if set(filtered_sections) != set(match_sections): # some sections removed
|
||||
child = self.add_child(entity)
|
||||
child.insert(entity, frozenset(remove_sections), target, obj_sections)
|
||||
if set(filtered_sections) != set(obj_sections):
|
||||
if sym_placement.basis:
|
||||
subplace = False
|
||||
try:
|
||||
# If existing placement exists, make sure that
|
||||
# it is emitted.
|
||||
obj_placement = self.placements[sections]
|
||||
except KeyError:
|
||||
# Create intermediate placement.
|
||||
obj_placement = self.self_placement(sections, sym_placement.basis.target, None, False)
|
||||
if obj_placement.basis.flags:
|
||||
subplace = True
|
||||
|
||||
# Remember the result for node command generation
|
||||
self.expanded_sections[sections] = filtered_sections
|
||||
self.exclude_from_parent(sections)
|
||||
if subplace:
|
||||
obj_placement.basis.add_subplacement(obj_placement)
|
||||
self.subplacements.append(sections)
|
||||
else:
|
||||
obj_placement.force_significant()
|
||||
|
||||
def get_node_output_commands(self):
|
||||
commands = collections.defaultdict(list)
|
||||
obj_placement.sections = filtered_sections
|
||||
sym_placement.basis = obj_placement
|
||||
|
||||
for sections in self.get_section_keys():
|
||||
info = self.sections[sections]
|
||||
sym_placement.sections = remove_sections
|
||||
child.placements[sections] = sym_placement
|
||||
|
||||
try:
|
||||
match_sections = self.expanded_sections[sections]
|
||||
except KeyError:
|
||||
match_sections = []
|
||||
|
||||
if match_sections or info.explicit:
|
||||
command_sections = match_sections if match_sections else sections
|
||||
command = InputSectionDesc(self.entity, command_sections, [])
|
||||
commands[info.target].append(command)
|
||||
|
||||
return commands
|
||||
|
||||
def exclude_from_parent(self, sections):
|
||||
# Check if there is an explicit emmission for the parent node, which is an archive node.
|
||||
# If there is, make the exclusion there. If not, make the exclusion on the root node.
|
||||
# This is to avoid emitting unecessary command and exclusions for the archive node and
|
||||
# from the root node, respectively.
|
||||
if self.parent.sections[sections].explicit:
|
||||
self.parent.add_exclusion(sections, self.entity)
|
||||
else:
|
||||
self.parent.parent.add_exclusion(sections, self.entity)
|
||||
def get_output_sections(self):
|
||||
output_sections = [key for key in self.placements if key not in self.subplacements]
|
||||
return sorted(output_sections, key=' '.join)
|
||||
|
||||
|
||||
class ArchiveNode(RuleNode):
|
||||
class ArchiveNode(EntityNode):
|
||||
|
||||
def __init__(self, parent, name, sections):
|
||||
RuleNode.__init__(self, parent, name, sections)
|
||||
self.child_node = ObjectNode
|
||||
def __init__(self, parent, name):
|
||||
EntityNode.__init__(self, parent, name)
|
||||
self.child_t = ObjectNode
|
||||
self.entity = Entity(self.name)
|
||||
|
||||
|
||||
class RootNode(RuleNode):
|
||||
class RootNode(EntityNode):
|
||||
def __init__(self):
|
||||
RuleNode.__init__(self, None, Entity.ALL, None)
|
||||
self.child_node = ArchiveNode
|
||||
EntityNode.__init__(self, None, Entity.ALL)
|
||||
self.child_t = ArchiveNode
|
||||
self.entity = Entity('*')
|
||||
|
||||
def insert(self, entity, sections, target, sections_db):
|
||||
if self.entity.specificity == entity.specificity:
|
||||
self.add_sections(sections, target)
|
||||
else:
|
||||
self.add_node_child(entity, sections, target, sections_db)
|
||||
|
||||
|
||||
class Generation:
|
||||
"""
|
||||
Implements generation of placement rules based on collected sections, scheme and mapping fragment.
|
||||
Implements generation of placement based on collected sections, scheme and mapping fragment.
|
||||
"""
|
||||
|
||||
DEFAULT_SCHEME = 'default'
|
||||
|
||||
# Processed mapping, scheme and section entries
|
||||
EntityMapping = namedtuple('EntityMapping', 'entity sections_group target')
|
||||
EntityMapping = namedtuple('EntityMapping', 'entity sections_group target flags')
|
||||
|
||||
def __init__(self, check_mappings=False, check_mapping_exceptions=None):
|
||||
self.schemes = {}
|
||||
self.sections = {}
|
||||
self.placements = {}
|
||||
self.mappings = {}
|
||||
|
||||
self.check_mappings = check_mappings
|
||||
@@ -279,7 +315,7 @@ class Generation:
|
||||
else:
|
||||
self.check_mapping_exceptions = []
|
||||
|
||||
def _build_scheme_dictionary(self):
|
||||
def _prepare_scheme_dictionary(self):
|
||||
scheme_dictionary = collections.defaultdict(dict)
|
||||
|
||||
# Collect sections into buckets based on target name
|
||||
@@ -292,7 +328,7 @@ class Generation:
|
||||
sections_in_bucket = sections_bucket[target_name]
|
||||
|
||||
try:
|
||||
sections = self.sections[sections_name]
|
||||
sections = self.placements[sections_name]
|
||||
except KeyError:
|
||||
message = GenerationException.UNDEFINED_REFERENCE + " to sections '" + sections_name + "'."
|
||||
raise GenerationException(message, scheme)
|
||||
@@ -324,12 +360,13 @@ class Generation:
|
||||
|
||||
return scheme_dictionary
|
||||
|
||||
def get_section_strs(self, section):
|
||||
s_list = [Sections.get_section_data_from_entry(s) for s in section.entries]
|
||||
return frozenset([item for sublist in s_list for item in sublist])
|
||||
def _prepare_entity_mappings(self, scheme_dictionary, entities):
|
||||
# Prepare entity mappings processed from mapping fragment entries.
|
||||
def get_section_strs(section):
|
||||
s_list = [Sections.get_section_data_from_entry(s) for s in section.entries]
|
||||
return frozenset([item for sublist in s_list for item in sublist])
|
||||
|
||||
def _generate_entity_mappings(self, scheme_dictionary, entities):
|
||||
entity_mappings = []
|
||||
entity_mappings = dict()
|
||||
|
||||
for mapping in self.mappings.values():
|
||||
archive = mapping.archive
|
||||
@@ -345,45 +382,77 @@ class Generation:
|
||||
message = "'%s' not found" % str(entity)
|
||||
raise GenerationException(message, mapping)
|
||||
|
||||
# Create placement rule for each 'section -> target' in the scheme.
|
||||
#
|
||||
# For example. for the mapping entry:
|
||||
#
|
||||
# obj (scheme)
|
||||
#
|
||||
# The enumrated to:
|
||||
#
|
||||
# obj (section1 -> target1)
|
||||
# obj (section2 -> target2)
|
||||
# ...
|
||||
if (obj, symbol, scheme_name) in mapping.flags.keys():
|
||||
flags = mapping.flags[(obj, symbol, scheme_name)]
|
||||
# Check if all section->target defined in the current
|
||||
# scheme.
|
||||
for (s, t, f) in flags:
|
||||
if (t not in scheme_dictionary[scheme_name].keys() or
|
||||
s not in [_s.name for _s in scheme_dictionary[scheme_name][t]]):
|
||||
|
||||
message = "%s->%s not defined in scheme '%s'" % (s, t, scheme_name)
|
||||
raise GenerationException(message, mapping)
|
||||
else:
|
||||
flags = None
|
||||
|
||||
# Create placement for each 'section -> target' in the scheme.
|
||||
for (target, sections) in scheme_dictionary[scheme_name].items():
|
||||
for section in sections:
|
||||
entity_mappings.append(Generation.EntityMapping(entity, self.get_section_strs(section), target))
|
||||
# Find the applicable flags
|
||||
_flags = []
|
||||
|
||||
return entity_mappings
|
||||
if flags:
|
||||
for (s, t, f) in flags:
|
||||
if (s, t) == (section.name, target):
|
||||
_flags.extend(f)
|
||||
|
||||
def generate_rules(self, entities):
|
||||
scheme_dictionary = self._build_scheme_dictionary()
|
||||
sections_str = get_section_strs(section)
|
||||
|
||||
entity_mappings = self._generate_entity_mappings(scheme_dictionary, entities)
|
||||
key = (entity, section.name)
|
||||
|
||||
entity_mappings.sort(key=lambda m: m.entity)
|
||||
try:
|
||||
existing = entity_mappings[key]
|
||||
except KeyError:
|
||||
existing = None
|
||||
|
||||
# Create root nodes dictionary for the default scheme, whose
|
||||
# key is the target name and value is a list of the root nodes for that target.
|
||||
if not existing:
|
||||
entity_mappings[key] = Generation.EntityMapping(entity, sections_str, target, _flags)
|
||||
else:
|
||||
# Check for conflicts.
|
||||
if (target != existing.target):
|
||||
raise GenerationException('Sections mapped to multiple targets.', mapping)
|
||||
|
||||
# Combine flags here if applicable, to simplify
|
||||
# insertion logic.
|
||||
if (_flags or existing.flags):
|
||||
if ((_flags and not existing.flags) or (not _flags and existing.flags)):
|
||||
_flags.extend(existing.flags)
|
||||
entity_mappings[key] = Generation.EntityMapping(entity,
|
||||
sections_str,
|
||||
target, _flags)
|
||||
elif (_flags == existing.flags):
|
||||
pass
|
||||
else:
|
||||
raise GenerationException('Conflicting flags specified.', mapping)
|
||||
|
||||
# Sort the mappings by specificity, so as to simplify
|
||||
# insertion logic.
|
||||
res = list(entity_mappings.values())
|
||||
res.sort(key=lambda m: m.entity)
|
||||
return res
|
||||
|
||||
def generate(self, entities):
|
||||
scheme_dictionary = self._prepare_scheme_dictionary()
|
||||
entity_mappings = self._prepare_entity_mappings(scheme_dictionary, entities)
|
||||
root_node = RootNode()
|
||||
for (target, sections) in scheme_dictionary['default'].items():
|
||||
for section in sections:
|
||||
root_node.insert(Entity(), self.get_section_strs(section), target, entities)
|
||||
|
||||
for mapping in entity_mappings:
|
||||
(entity, sections, target) = mapping
|
||||
(entity, sections, target, flags) = mapping
|
||||
try:
|
||||
root_node.insert(entity, sections, target, entities)
|
||||
root_node.insert(entity, sections, target, flags, entities)
|
||||
except ValueError as e:
|
||||
raise GenerationException(str(e))
|
||||
|
||||
# Traverse the tree, creating the rules
|
||||
# Traverse the tree, creating the placements
|
||||
commands = root_node.get_output_commands()
|
||||
|
||||
return commands
|
||||
@@ -398,7 +467,7 @@ class Generation:
|
||||
if isinstance(fragment, Scheme):
|
||||
dict_to_append_to = self.schemes
|
||||
elif isinstance(fragment, Sections):
|
||||
dict_to_append_to = self.sections
|
||||
dict_to_append_to = self.placements
|
||||
else:
|
||||
dict_to_append_to = self.mappings
|
||||
|
||||
|
@@ -151,7 +151,7 @@ def main():
|
||||
raise LdGenFailure('failed to parse %s\n%s' % (fragment_file.name, str(e)))
|
||||
generation_model.add_fragments_from_file(fragment_file)
|
||||
|
||||
mapping_rules = generation_model.generate_rules(sections_infos)
|
||||
mapping_rules = generation_model.generate(sections_infos)
|
||||
|
||||
script_model = LinkerScript(input_file)
|
||||
script_model.fill(mapping_rules)
|
||||
|
@@ -17,9 +17,35 @@
|
||||
from entity import Entity
|
||||
|
||||
|
||||
class AlignAtAddress():
|
||||
|
||||
def __init__(self, alignment):
|
||||
self.alignment = alignment
|
||||
|
||||
def __str__(self):
|
||||
return ('. = ALIGN(%d);' % self.alignment)
|
||||
|
||||
def __eq__(self, other):
|
||||
return (isinstance(other, AlignAtAddress) and
|
||||
self.alignment == other.alignment)
|
||||
|
||||
|
||||
class SymbolAtAddress():
|
||||
|
||||
def __init__(self, symbol):
|
||||
self.symbol = symbol
|
||||
|
||||
def __str__(self):
|
||||
return ('%s = ABSOLUTE(.);' % self.symbol)
|
||||
|
||||
def __eq__(self, other):
|
||||
return (isinstance(other, SymbolAtAddress) and
|
||||
self.symbol == other.symbol)
|
||||
|
||||
|
||||
class InputSectionDesc():
|
||||
|
||||
def __init__(self, entity, sections, exclusions=None):
|
||||
def __init__(self, entity, sections, exclusions=None, keep=False, sort=None):
|
||||
assert(entity.specificity != Entity.Specificity.SYMBOL)
|
||||
|
||||
self.entity = entity
|
||||
@@ -34,7 +60,12 @@ class InputSectionDesc():
|
||||
else:
|
||||
self.exclusions = set()
|
||||
|
||||
self.keep = keep
|
||||
self.sort = sort
|
||||
|
||||
def __str__(self):
|
||||
sections_string = '( )'
|
||||
|
||||
if self.sections:
|
||||
exclusion_strings = []
|
||||
|
||||
@@ -57,22 +88,48 @@ class InputSectionDesc():
|
||||
for section in sorted(self.sections):
|
||||
section_strings.append(section)
|
||||
|
||||
sections_string = '(%s)' % ' '.join(section_strings)
|
||||
else:
|
||||
sections_string = '( )'
|
||||
if self.sort:
|
||||
if self.sort == (None, None):
|
||||
pattern = 'SORT(%s)'
|
||||
elif self.sort == ('name', None):
|
||||
pattern = 'SORT_BY_NAME(%s)'
|
||||
elif self.sort == ('alignment', None):
|
||||
pattern = 'SORT_BY_ALIGNMENT(%s)'
|
||||
elif self.sort == ('init_priority', None):
|
||||
pattern = 'SORT_BY_INIT_PRIORITY(%s)'
|
||||
elif self.sort == ('name', 'alignment'):
|
||||
pattern = 'SORT_BY_NAME(SORT_BY_ALIGNMENT(%s))'
|
||||
elif self.sort == ('alignment', 'name'):
|
||||
pattern = 'SORT_BY_ALIGNMENT(SORT_BY_NAME(%s))'
|
||||
elif self.sort == ('name', 'name'):
|
||||
pattern = 'SORT_BY_NAME(SORT_BY_NAME(%s))'
|
||||
elif self.sort == ('alignment', 'alignment'):
|
||||
pattern = 'SORT_BY_ALIGNMENT(SORT_BY_ALIGNMENT(%s))'
|
||||
else:
|
||||
raise Exception('Invalid sort arguments')
|
||||
|
||||
command = None
|
||||
section_strings = [(pattern % s) for s in section_strings]
|
||||
|
||||
sections_string = '(%s)' % ' '.join(section_strings)
|
||||
|
||||
if self.entity.specificity == Entity.Specificity.NONE:
|
||||
command = '*%s' % (sections_string)
|
||||
entry = '*%s' % (sections_string)
|
||||
elif self.entity.specificity == Entity.Specificity.ARCHIVE:
|
||||
command = '*%s:%s' % (self.entity.archive, sections_string)
|
||||
entry = '*%s:%s' % (self.entity.archive, sections_string)
|
||||
else:
|
||||
command = '*%s:%s.*%s' % (self.entity.archive, self.entity.obj, sections_string)
|
||||
entry = '*%s:%s.*%s' % (self.entity.archive, self.entity.obj, sections_string)
|
||||
|
||||
return command
|
||||
if self.keep:
|
||||
res = 'KEEP(%s)' % entry
|
||||
else:
|
||||
res = entry
|
||||
|
||||
return res
|
||||
|
||||
def __eq__(self, other):
|
||||
return (self.entity == other.entity and
|
||||
return (isinstance(other, InputSectionDesc) and
|
||||
self.entity == other.entity and
|
||||
self.sections == other.sections and
|
||||
self.exclusions == other.exclusions)
|
||||
self.exclusions == other.exclusions and
|
||||
self.keep == other.keep and
|
||||
self.sort == other.sort)
|
||||
|
@@ -82,3 +82,8 @@ entries:
|
||||
[scheme:noflash_data]
|
||||
entries:
|
||||
rodata -> dram0_data
|
||||
|
||||
[mapping:default]
|
||||
archive: *
|
||||
entries:
|
||||
* (default)
|
||||
|
@@ -23,11 +23,11 @@ from io import StringIO
|
||||
from pyparsing import ParseException, ParseFatalException, Word, alphanums
|
||||
|
||||
try:
|
||||
from fragments import FRAGMENT_TYPES, Fragment, FragmentFile, KeyGrammar
|
||||
from fragments import FRAGMENT_TYPES, Fragment, FragmentFile, KeyGrammar, Mapping
|
||||
from sdkconfig import SDKConfig
|
||||
except ImportError:
|
||||
sys.path.append('../')
|
||||
from fragments import FRAGMENT_TYPES, Fragment, FragmentFile, KeyGrammar
|
||||
from fragments import FRAGMENT_TYPES, Fragment, FragmentFile, KeyGrammar, Mapping
|
||||
from sdkconfig import SDKConfig
|
||||
|
||||
|
||||
@@ -811,6 +811,208 @@ entries:
|
||||
with self.assertRaises(ParseException):
|
||||
FragmentFile(test_fragment, self.sdkconfig)
|
||||
|
||||
def test_keep_flag(self):
|
||||
# Test parsing combinations and orders of flags
|
||||
test_fragment = self.create_fragment_file(u"""
|
||||
[mapping:map]
|
||||
archive: libmain.a
|
||||
entries:
|
||||
obj1 (default);
|
||||
text->flash_text keep,
|
||||
rodata->flash_rodata keep keep
|
||||
""")
|
||||
fragment_file = FragmentFile(test_fragment, self.sdkconfig)
|
||||
|
||||
fragment = fragment_file.fragments[0]
|
||||
|
||||
expected = [('text', 'flash_text', [Mapping.Keep()]),
|
||||
('rodata', 'flash_rodata', [Mapping.Keep(), Mapping.Keep()])]
|
||||
actual = fragment.flags[('obj1', None, 'default')]
|
||||
|
||||
self.assertEqual(expected, actual)
|
||||
|
||||
def test_align_flag(self):
|
||||
# Test parsing combinations and orders of flags
|
||||
test_fragment = self.create_fragment_file(u"""
|
||||
[mapping:map]
|
||||
archive: libmain.a
|
||||
entries:
|
||||
obj1 (default);
|
||||
text->flash_text align(8),
|
||||
rodata->flash_rodata align(8, pre),
|
||||
data->dram0_data align(8, pre, post),
|
||||
bss->dram0_bss align(8, post),
|
||||
common->dram0_bss align(8, pre, post) align(8)
|
||||
""")
|
||||
|
||||
fragment_file = FragmentFile(test_fragment, self.sdkconfig)
|
||||
fragment = fragment_file.fragments[0]
|
||||
|
||||
expected = [('text', 'flash_text', [Mapping.Align(8, True, False)]),
|
||||
('rodata', 'flash_rodata', [Mapping.Align(8, True, False)]),
|
||||
('data', 'dram0_data', [Mapping.Align(8, True, True)]),
|
||||
('bss', 'dram0_bss', [Mapping.Align(8, False, True)]),
|
||||
('common', 'dram0_bss', [Mapping.Align(8, True, True), Mapping.Align(8, True, False)])]
|
||||
actual = fragment.flags[('obj1', None, 'default')]
|
||||
|
||||
self.assertEqual(expected, actual)
|
||||
|
||||
# Wrong post, pre order
|
||||
test_fragment = self.create_fragment_file(u"""
|
||||
[mapping:map]
|
||||
archive: libmain.a
|
||||
entries:
|
||||
obj1 (noflash)
|
||||
text->iram0_text align(8, post, pre)
|
||||
""")
|
||||
|
||||
with self.assertRaises(ParseFatalException):
|
||||
FragmentFile(test_fragment, self.sdkconfig)
|
||||
|
||||
def test_sort_flag(self):
|
||||
# Test parsing combinations and orders of flags
|
||||
test_fragment = self.create_fragment_file(u"""
|
||||
[mapping:map]
|
||||
archive: libmain.a
|
||||
entries:
|
||||
obj1 (default);
|
||||
text->flash_text sort(name),
|
||||
rodata->flash_rodata sort(alignment),
|
||||
data->dram0_data sort(init_priority),
|
||||
bss->dram0_bss sort(name, alignment),
|
||||
common->dram0_bss sort(alignment, name),
|
||||
iram->iram0_text sort(name, name),
|
||||
dram->dram0_data sort(alignment, alignment)
|
||||
""")
|
||||
|
||||
fragment_file = FragmentFile(test_fragment, self.sdkconfig)
|
||||
fragment = fragment_file.fragments[0]
|
||||
|
||||
expected = [('text', 'flash_text', [Mapping.Sort('name')]),
|
||||
('rodata', 'flash_rodata', [Mapping.Sort('alignment')]),
|
||||
('data', 'dram0_data', [Mapping.Sort('init_priority')]),
|
||||
('bss', 'dram0_bss', [Mapping.Sort('name', 'alignment')]),
|
||||
('common', 'dram0_bss', [Mapping.Sort('alignment', 'name')]),
|
||||
('iram', 'iram0_text', [Mapping.Sort('name', 'name')]),
|
||||
('dram', 'dram0_data', [Mapping.Sort('alignment', 'alignment')])]
|
||||
actual = fragment.flags[('obj1', None, 'default')]
|
||||
self.assertEqual(expected, actual)
|
||||
|
||||
test_fragment = self.create_fragment_file(u"""
|
||||
[mapping:map]
|
||||
archive: libmain.a
|
||||
entries:
|
||||
obj1 (default)
|
||||
text->iram0_text sort(name) sort(alignment)
|
||||
""")
|
||||
|
||||
def test_emit_flag(self):
|
||||
# Test parsing combinations and orders of flags
|
||||
test_fragment = self.create_fragment_file(u"""
|
||||
[mapping:map]
|
||||
archive: libmain.a
|
||||
entries:
|
||||
obj1 (default);
|
||||
text->flash_text emit(sym1),
|
||||
rodata->flash_rodata emit(sym2, pre),
|
||||
data->dram0_data emit(sym3, post),
|
||||
bss->dram0_bss emit(sym4, pre, post),
|
||||
common->dram0_bss emit(sym5, pre, post) emit(sym6)
|
||||
""")
|
||||
|
||||
fragment_file = FragmentFile(test_fragment, self.sdkconfig)
|
||||
fragment = fragment_file.fragments[0]
|
||||
|
||||
expected = [('text', 'flash_text', [Mapping.Emit('sym1', True, True)]),
|
||||
('rodata', 'flash_rodata', [Mapping.Emit('sym2', True, False)]),
|
||||
('data', 'dram0_data', [Mapping.Emit('sym3', False, True)]),
|
||||
('bss', 'dram0_bss', [Mapping.Emit('sym4', True, True)]),
|
||||
('common', 'dram0_bss', [Mapping.Emit('sym5', True, True), Mapping.Emit('sym6', True, True)])]
|
||||
actual = fragment.flags[('obj1', None, 'default')]
|
||||
self.assertEqual(expected, actual)
|
||||
|
||||
def test_flag_order(self):
|
||||
# Test that the order in which the flags are specified is retained
|
||||
test_fragment = self.create_fragment_file(u"""
|
||||
[mapping:map]
|
||||
archive: libmain.a
|
||||
entries:
|
||||
obj1 (default);
|
||||
text->flash_text align(4) keep emit(sym1) align(8) sort(name),
|
||||
rodata->flash_rodata keep align(4) keep emit(sym1) align(8) align(4) sort(name)
|
||||
""")
|
||||
fragment_file = FragmentFile(test_fragment, self.sdkconfig)
|
||||
fragment = fragment_file.fragments[0]
|
||||
|
||||
expected = [('text', 'flash_text', [Mapping.Align(4, True, False),
|
||||
Mapping.Keep(),
|
||||
Mapping.Emit('sym1', True, True),
|
||||
Mapping.Align(8, True, False),
|
||||
Mapping.Sort('name')]),
|
||||
('rodata', 'flash_rodata', [Mapping.Keep(),
|
||||
Mapping.Align(4, True, False),
|
||||
Mapping.Keep(),
|
||||
Mapping.Emit('sym1', True, True),
|
||||
Mapping.Align(8, True, False),
|
||||
Mapping.Align(4, True, False),
|
||||
Mapping.Sort('name')])]
|
||||
actual = fragment.flags[('obj1', None, 'default')]
|
||||
self.assertEqual(expected, actual)
|
||||
|
||||
def test_flags_entries_multiple_flags(self):
|
||||
# Not an error, generation step handles this, since
|
||||
# it that step has a more complete information
|
||||
# about all mappings.
|
||||
test_fragment = self.create_fragment_file(u"""
|
||||
[mapping:map]
|
||||
archive: libmain.a
|
||||
entries:
|
||||
obj1 (default);
|
||||
text->flash_text align(4) keep emit(sym1) sort(name),
|
||||
text->flash_text align(4) keep emit(sym1) sort(name)
|
||||
""")
|
||||
fragment_file = FragmentFile(test_fragment, self.sdkconfig)
|
||||
fragment = fragment_file.fragments[0]
|
||||
|
||||
expected = [('text', 'flash_text', [Mapping.Align(4, True, False),
|
||||
Mapping.Keep(),
|
||||
Mapping.Emit('sym1', True, True),
|
||||
Mapping.Sort('name')]),
|
||||
('text', 'flash_text', [Mapping.Align(4, True, False),
|
||||
Mapping.Keep(),
|
||||
Mapping.Emit('sym1', True, True),
|
||||
Mapping.Sort('name')])]
|
||||
actual = fragment.flags[('obj1', None, 'default')]
|
||||
self.assertEqual(expected, actual)
|
||||
|
||||
def test_flags_entries_multiple_flags_and_entries(self):
|
||||
# Not an error, generation step handles this, since
|
||||
# it that step has a more complete information
|
||||
# about all mappings. This can happen across multiple
|
||||
# mapping fragments.
|
||||
test_fragment = self.create_fragment_file(u"""
|
||||
[mapping:map]
|
||||
archive: libmain.a
|
||||
entries:
|
||||
obj1 (default);
|
||||
text->flash_text align(4) keep emit(sym1) sort(name)
|
||||
obj1 (default);
|
||||
text->flash_text align(4) keep emit(sym1) sort(name)
|
||||
""")
|
||||
fragment_file = FragmentFile(test_fragment, self.sdkconfig)
|
||||
fragment = fragment_file.fragments[0]
|
||||
|
||||
expected = [('text', 'flash_text', [Mapping.Align(4, True, False),
|
||||
Mapping.Keep(),
|
||||
Mapping.Emit('sym1', True, True),
|
||||
Mapping.Sort('name')]),
|
||||
('text', 'flash_text', [Mapping.Align(4, True, False),
|
||||
Mapping.Keep(),
|
||||
Mapping.Emit('sym1', True, True),
|
||||
Mapping.Sort('name')])]
|
||||
actual = fragment.flags[('obj1', None, 'default')]
|
||||
self.assertEqual(expected, actual)
|
||||
|
||||
|
||||
class DeprecatedMappingTest(FragmentTest):
|
||||
|
||||
|
@@ -33,7 +33,7 @@ from io import StringIO
|
||||
from entity import Entity, EntityDB
|
||||
from fragments import FragmentFile
|
||||
from linker_script import LinkerScript
|
||||
from output_commands import InputSectionDesc
|
||||
from output_commands import AlignAtAddress, InputSectionDesc, SymbolAtAddress
|
||||
from sdkconfig import SDKConfig
|
||||
|
||||
ROOT = Entity('*')
|
||||
@@ -120,29 +120,30 @@ class GenerationTest(unittest.TestCase):
|
||||
self.assertEqual(set(expected.keys()), set(actual.keys()))
|
||||
|
||||
for target in sorted(actual.keys()):
|
||||
message = 'failed target %s' % target
|
||||
a_cmds = actual[target]
|
||||
e_cmds = expected[target]
|
||||
|
||||
self.assertEqual(len(a_cmds), len(e_cmds))
|
||||
self.assertEqual(len(a_cmds), len(e_cmds), message)
|
||||
|
||||
for a, e in zip(a_cmds, e_cmds):
|
||||
self.assertEqual(a, e)
|
||||
self.assertEqual(a, e, message)
|
||||
|
||||
def get_default(self, target, rules):
|
||||
return rules[target][0]
|
||||
|
||||
|
||||
class DefaultMappingTest(GenerationTest):
|
||||
|
||||
def test_rule_generation_default(self):
|
||||
# Checks that default rules are generated from
|
||||
# the default scheme properly and even if no mappings
|
||||
# are defined.
|
||||
actual = self.generation.generate_rules(self.entities)
|
||||
actual = self.generation.generate(self.entities)
|
||||
expected = self.generate_default_rules()
|
||||
|
||||
self.compare_rules(expected, actual)
|
||||
|
||||
|
||||
class DefaultMappingTest(GenerationTest):
|
||||
|
||||
def test_default_mapping_lib(self):
|
||||
# Mapping a library with default mapping. This should not emit additional rules,
|
||||
# other than the default ones.
|
||||
@@ -249,7 +250,7 @@ entries:
|
||||
* (noflash) #1
|
||||
"""
|
||||
self.add_fragments(alt if alt else mapping)
|
||||
actual = self.generation.generate_rules(self.entities)
|
||||
actual = self.generation.generate(self.entities)
|
||||
expected = self.generate_default_rules()
|
||||
|
||||
flash_text = expected['flash_text']
|
||||
@@ -287,7 +288,7 @@ entries:
|
||||
"""
|
||||
|
||||
self.add_fragments(alt if alt else mapping)
|
||||
actual = self.generation.generate_rules(self.entities)
|
||||
actual = self.generation.generate(self.entities)
|
||||
expected = self.generate_default_rules()
|
||||
|
||||
flash_text = expected['flash_text']
|
||||
@@ -326,7 +327,7 @@ entries:
|
||||
croutine:prvCheckPendingReadyList (noflash) #1
|
||||
"""
|
||||
self.add_fragments(mapping)
|
||||
actual = self.generation.generate_rules(self.entities)
|
||||
actual = self.generation.generate(self.entities)
|
||||
expected = self.generate_default_rules()
|
||||
|
||||
flash_text = expected['flash_text']
|
||||
@@ -382,7 +383,7 @@ entries:
|
||||
"""
|
||||
|
||||
self.add_fragments(mapping)
|
||||
actual = self.generation.generate_rules(self.entities)
|
||||
actual = self.generation.generate(self.entities)
|
||||
expected = self.generate_default_rules()
|
||||
|
||||
flash_text = expected['flash_text']
|
||||
@@ -443,7 +444,7 @@ entries:
|
||||
croutine:prvCheckPendingReadyList (default) #2
|
||||
"""
|
||||
self.add_fragments(mapping)
|
||||
actual = self.generation.generate_rules(self.entities)
|
||||
actual = self.generation.generate(self.entities)
|
||||
expected = self.generate_default_rules()
|
||||
|
||||
flash_text = expected['flash_text']
|
||||
@@ -512,7 +513,7 @@ entries:
|
||||
"""
|
||||
|
||||
self.add_fragments(mapping)
|
||||
actual = self.generation.generate_rules(self.entities)
|
||||
actual = self.generation.generate(self.entities)
|
||||
expected = self.generate_default_rules()
|
||||
|
||||
flash_text = expected['flash_text']
|
||||
@@ -606,7 +607,7 @@ entries:
|
||||
"""
|
||||
|
||||
self.add_fragments(mapping)
|
||||
actual = self.generation.generate_rules(self.entities)
|
||||
actual = self.generation.generate(self.entities)
|
||||
expected = self.generate_default_rules()
|
||||
|
||||
flash_text = expected['flash_text']
|
||||
@@ -634,6 +635,40 @@ entries:
|
||||
|
||||
self.compare_rules(expected, actual)
|
||||
|
||||
def test_root_mapping_fragment(self):
|
||||
# Test creation of a mapping fragment that maps '*'.
|
||||
# This should generate another default command in iram0_text:
|
||||
#
|
||||
# iram0_text
|
||||
# * (.custom_section) A
|
||||
# * (.iram .iram.*)
|
||||
mapping = u"""
|
||||
[sections:custom_section]
|
||||
entries:
|
||||
.custom_section
|
||||
|
||||
[scheme:custom_scheme]
|
||||
entries:
|
||||
custom_section -> iram0_text
|
||||
|
||||
[mapping:default2]
|
||||
archive: *
|
||||
entries:
|
||||
* (custom_scheme) #1
|
||||
"""
|
||||
|
||||
self.add_fragments(mapping)
|
||||
actual = self.generation.generate(self.entities)
|
||||
expected = self.generate_default_rules()
|
||||
|
||||
# Generate default command A
|
||||
# Since these are the same 'specificity', the commands
|
||||
# are arranged alphabetically.
|
||||
expected['iram0_text'].append(expected['iram0_text'][0])
|
||||
expected['iram0_text'][0] = InputSectionDesc(ROOT, ['.custom_section'], [])
|
||||
|
||||
self.compare_rules(expected, actual)
|
||||
|
||||
|
||||
class AdvancedTest(GenerationTest):
|
||||
|
||||
@@ -671,7 +706,7 @@ entries:
|
||||
croutine (noflash_data) #2
|
||||
"""
|
||||
self.add_fragments(mapping)
|
||||
actual = self.generation.generate_rules(self.entities)
|
||||
actual = self.generation.generate(self.entities)
|
||||
expected = self.generate_default_rules()
|
||||
|
||||
flash_text = expected['flash_text']
|
||||
@@ -725,7 +760,7 @@ entries:
|
||||
croutine (noflash_data) #2
|
||||
"""
|
||||
self.add_fragments(mapping)
|
||||
actual = self.generation.generate_rules(self.entities)
|
||||
actual = self.generation.generate(self.entities)
|
||||
expected = self.generate_default_rules()
|
||||
|
||||
flash_text = expected['flash_text']
|
||||
@@ -764,7 +799,7 @@ entries:
|
||||
self.add_fragments(alt if alt else mapping)
|
||||
|
||||
with self.assertRaises(GenerationException):
|
||||
self.generation.generate_rules(self.entities)
|
||||
self.generation.generate(self.entities)
|
||||
|
||||
def test_complex_mapping_case(self, alt=None):
|
||||
# Test a complex case where an object is mapped using
|
||||
@@ -818,7 +853,7 @@ entries:
|
||||
"""
|
||||
|
||||
self.add_fragments(alt if alt else mapping)
|
||||
actual = self.generation.generate_rules(self.entities)
|
||||
actual = self.generation.generate(self.entities)
|
||||
expected = self.generate_default_rules()
|
||||
|
||||
flash_text = expected['flash_text']
|
||||
@@ -907,7 +942,7 @@ entries:
|
||||
"""
|
||||
|
||||
self.add_fragments(mapping)
|
||||
actual = self.generation.generate_rules(self.entities)
|
||||
actual = self.generation.generate(self.entities)
|
||||
expected = self.generate_default_rules()
|
||||
|
||||
flash_text = expected['flash_text']
|
||||
@@ -1004,7 +1039,7 @@ entries:
|
||||
"""
|
||||
|
||||
self.add_fragments(mapping)
|
||||
actual = self.generation.generate_rules(self.entities)
|
||||
actual = self.generation.generate(self.entities)
|
||||
expected = self.generate_default_rules()
|
||||
|
||||
flash_text = expected['flash_text']
|
||||
@@ -1046,7 +1081,7 @@ entries:
|
||||
self.add_fragments(mapping)
|
||||
|
||||
with self.assertRaises(GenerationException):
|
||||
self.generation.generate_rules(self.entities)
|
||||
self.generation.generate(self.entities)
|
||||
|
||||
def test_disambiguated_obj(self):
|
||||
# Test command generation for disambiguated entry. Should produce similar
|
||||
@@ -1059,7 +1094,7 @@ entries:
|
||||
"""
|
||||
port = Entity('libfreertos.a', 'port.c')
|
||||
self.add_fragments(mapping)
|
||||
actual = self.generation.generate_rules(self.entities)
|
||||
actual = self.generation.generate(self.entities)
|
||||
expected = self.generate_default_rules()
|
||||
|
||||
flash_text = expected['flash_text']
|
||||
@@ -1085,6 +1120,60 @@ entries:
|
||||
|
||||
self.compare_rules(expected, actual)
|
||||
|
||||
def test_root_mapping_fragment_conflict(self):
|
||||
# Test that root mapping fragments are also checked for
|
||||
# conflicts.
|
||||
#
|
||||
# 'custom_scheme' entries conflict the 'default' scheme
|
||||
# entries.
|
||||
mapping = u"""
|
||||
[scheme:custom_scheme]
|
||||
entries:
|
||||
flash_text -> iram0_text
|
||||
|
||||
[mapping:default2]
|
||||
archive: *
|
||||
entries:
|
||||
* (custom_scheme)
|
||||
"""
|
||||
|
||||
self.add_fragments(mapping)
|
||||
with self.assertRaises(GenerationException):
|
||||
self.generation.generate(self.entities)
|
||||
|
||||
def test_root_mapping_fragment_duplicate(self):
|
||||
# Same root mappings have no effect.
|
||||
#
|
||||
# custom_scheme has the 'iram -> iram0_text' in common with
|
||||
# default scheme
|
||||
mapping = u"""
|
||||
[sections:custom_section]
|
||||
entries:
|
||||
.custom_section
|
||||
|
||||
[scheme:custom_scheme]
|
||||
entries:
|
||||
iram -> iram0_text
|
||||
custom_section -> iram0_text
|
||||
|
||||
[mapping:default2]
|
||||
archive: *
|
||||
entries:
|
||||
* (custom_scheme)
|
||||
"""
|
||||
|
||||
self.add_fragments(mapping)
|
||||
actual = self.generation.generate(self.entities)
|
||||
expected = self.generate_default_rules()
|
||||
|
||||
# Generate default command A
|
||||
# Since these are the same 'specificity', the commands
|
||||
# are arranged alphabetically.
|
||||
expected['iram0_text'].append(expected['iram0_text'][0])
|
||||
expected['iram0_text'][0] = InputSectionDesc(ROOT, ['.custom_section'], [])
|
||||
|
||||
self.compare_rules(expected, actual)
|
||||
|
||||
|
||||
class ConfigTest(GenerationTest):
|
||||
# Test command generation with conditions
|
||||
@@ -1119,7 +1208,7 @@ entries:
|
||||
self.add_fragments(scheme)
|
||||
self.add_fragments(alt if alt else mapping)
|
||||
|
||||
actual = self.generation.generate_rules(self.entities)
|
||||
actual = self.generation.generate(self.entities)
|
||||
expected = self.generate_default_rules()
|
||||
|
||||
if perf >= 1:
|
||||
@@ -1145,6 +1234,11 @@ entries:
|
||||
# Test that proper commands are generated
|
||||
# in conditional mapping entries.
|
||||
mapping = u"""
|
||||
[mapping:default]
|
||||
archive: *
|
||||
entries:
|
||||
* (default)
|
||||
|
||||
[mapping:test]
|
||||
archive: lib.a
|
||||
entries:
|
||||
@@ -1165,7 +1259,7 @@ entries:
|
||||
self.generation.mappings = {}
|
||||
self.add_fragments(alt if alt else mapping)
|
||||
|
||||
actual = self.generation.generate_rules(self.entities)
|
||||
actual = self.generation.generate(self.entities)
|
||||
expected = self.generate_default_rules()
|
||||
|
||||
if perf_level < 4 and perf_level > 0:
|
||||
@@ -1208,6 +1302,11 @@ entries:
|
||||
def test_conditional_entries_legacy_mapping_fragment(self):
|
||||
# Test conditional entries on legacy mapping fragment grammar.
|
||||
mapping = u"""
|
||||
[mapping:default]
|
||||
archive: *
|
||||
entries:
|
||||
* (default)
|
||||
|
||||
[mapping]
|
||||
archive: lib.a
|
||||
entries:
|
||||
@@ -1228,6 +1327,11 @@ entries:
|
||||
# Test conditional entries on legacy mapping fragment grammar
|
||||
# across multiple fragments.
|
||||
mapping = u"""
|
||||
[mapping:default]
|
||||
archive: *
|
||||
entries:
|
||||
* (default)
|
||||
|
||||
[mapping]
|
||||
archive: lib.a
|
||||
entries:
|
||||
@@ -1257,6 +1361,11 @@ entries:
|
||||
# Test conditional entries on new mapping fragment grammar.
|
||||
# across multiple fragments.
|
||||
mapping = u"""
|
||||
[mapping:default]
|
||||
archive: *
|
||||
entries:
|
||||
* (default)
|
||||
|
||||
[mapping:base]
|
||||
archive: lib.a
|
||||
entries:
|
||||
@@ -1282,5 +1391,442 @@ entries:
|
||||
self.test_conditional_mapping(mapping)
|
||||
|
||||
|
||||
class FlagTest(GenerationTest):
|
||||
|
||||
# Test correct generation of mapping fragment entries
|
||||
# with flags.
|
||||
|
||||
def test_flags_basics(self):
|
||||
# Test that input section commands additions are done (keep, sort).
|
||||
# Test that order dependent commands are properly generated (align, emit)
|
||||
# Normally, if an entry has the same mapping as parent, commands.
|
||||
# are not emitted for them. However, if there are flags, they should be -
|
||||
# only for the scheme entries that have flags, though.
|
||||
# Flag entries split across multiple entries work.
|
||||
#
|
||||
# flash_text
|
||||
# *((EXCLUDE_FILE(libfreertos:timers libfreertos:croutine).text ...) A
|
||||
# KEEP(* (SORT_BY_NAME(EXCLUDE_FILE(libfreertos:timers).text) ...) B
|
||||
#
|
||||
# flash_rodata
|
||||
# *((EXCLUDE_FILE(libfreertos:timers) .rodata ...) C
|
||||
# _sym2_start D.1
|
||||
# . = ALIGN(4) E.1
|
||||
# KEEP(* (EXCLUDE_FILE(libfreertos:timers) .rodata ...) F
|
||||
# _sym2_end D.2
|
||||
# . = ALIGN(4) E.2
|
||||
#
|
||||
# iram0_text
|
||||
# *(.iram .iram.*)
|
||||
# . = ALIGN(4) G.1
|
||||
# _sym1_start H.1
|
||||
# libfreertos.a:croutine(.text .literal ...) I
|
||||
# . = ALIGN(4) G.2
|
||||
# _sym1_end H.2
|
||||
mapping = u"""
|
||||
[mapping:test]
|
||||
archive: libfreertos.a
|
||||
entries:
|
||||
croutine (noflash_text);
|
||||
text->iram0_text align(4, pre, post) emit(sym1, pre, post) #1
|
||||
timers (default);
|
||||
text->flash_text keep sort(name) #2
|
||||
timers (default);
|
||||
rodata->flash_rodata emit(sym2, pre, post) align(4, pre, post) #3
|
||||
"""
|
||||
|
||||
self.add_fragments(mapping)
|
||||
|
||||
actual = self.generation.generate(self.entities)
|
||||
expected = self.generate_default_rules()
|
||||
|
||||
flash_text = expected['flash_text']
|
||||
iram0_text = expected['iram0_text']
|
||||
flash_rodata = expected['flash_rodata']
|
||||
|
||||
# Exclusions in flash_text for timers and croutine A
|
||||
flash_text[0].exclusions.add(CROUTINE)
|
||||
flash_text[0].exclusions.add(TIMERS)
|
||||
|
||||
# Command for #3 B
|
||||
flash_text.append(InputSectionDesc(TIMERS, flash_text[0].sections, [], keep=True, sort=('name', None)))
|
||||
|
||||
# Exclusions in flash_rodata for timers C
|
||||
flash_rodata[0].exclusions.add(TIMERS)
|
||||
|
||||
# Commands for #3 D.1, E.1, F, D.2, E.2
|
||||
flash_rodata.append(SymbolAtAddress('_sym2_start'))
|
||||
flash_rodata.append(AlignAtAddress(4))
|
||||
flash_rodata.append(InputSectionDesc(TIMERS, flash_rodata[0].sections, []))
|
||||
flash_rodata.append(SymbolAtAddress('_sym2_end'))
|
||||
flash_rodata.append(AlignAtAddress(4))
|
||||
|
||||
# Commands for # G.1, H.1, I, G.2, H.2
|
||||
iram0_text.append(AlignAtAddress(4))
|
||||
iram0_text.append(SymbolAtAddress('_sym1_start'))
|
||||
iram0_text.append(InputSectionDesc(CROUTINE, flash_text[0].sections, []))
|
||||
iram0_text.append(AlignAtAddress(4))
|
||||
iram0_text.append(SymbolAtAddress('_sym1_end'))
|
||||
|
||||
self.compare_rules(expected, actual)
|
||||
|
||||
def test_flags_intermediate_exclusion_command_root(self):
|
||||
# Test that intermediate exclusion commands from root-level commands
|
||||
# are included in the flags.
|
||||
#
|
||||
# flash_text
|
||||
# _sym1_start A.1
|
||||
# KEEP(* (EXCLUDE_FILE(libfreertos:croutine).text ...) B
|
||||
# KEEP(libfreertos.a:croutine(...))) C
|
||||
# _sym1_end A.2
|
||||
#
|
||||
# iram0_text
|
||||
# *(.iram .iram.*)
|
||||
# libfreertos.a:croutine(.text.prvCheckPendingReadyList ...) D
|
||||
mapping = u"""
|
||||
[mapping:default]
|
||||
archive: *
|
||||
entries:
|
||||
# 1
|
||||
* (default);
|
||||
text->flash_text emit(sym1) keep #2
|
||||
|
||||
[mapping:test]
|
||||
archive: libfreertos.a
|
||||
entries:
|
||||
croutine:prvCheckPendingReadyList (noflash_text) #3
|
||||
"""
|
||||
|
||||
self.generation.mappings = {}
|
||||
self.add_fragments(mapping)
|
||||
|
||||
actual = self.generation.generate(self.entities)
|
||||
expected = self.generate_default_rules()
|
||||
|
||||
flash_text = expected['flash_text']
|
||||
iram0_text = expected['iram0_text']
|
||||
|
||||
# Command for #2, pre A.1
|
||||
flash_text.insert(0, SymbolAtAddress('_sym1_start'))
|
||||
|
||||
# Command for #1 with keep B
|
||||
# and exclusion for #3
|
||||
flash_text[1].keep = True
|
||||
flash_text[1].exclusions.add(CROUTINE)
|
||||
|
||||
# Implicit exclusion command for #3 C
|
||||
croutine_sections = self.entities.get_sections('libfreertos.a', 'croutine')
|
||||
filtered_sections = fnmatch.filter(croutine_sections, '.literal.*')
|
||||
filtered_sections.extend(fnmatch.filter(croutine_sections, '.text.*'))
|
||||
|
||||
filtered_sections = [s for s in filtered_sections if not s.endswith('prvCheckPendingReadyList')]
|
||||
filtered_sections.append('.text')
|
||||
flash_text.append(InputSectionDesc(CROUTINE, set(filtered_sections), [], keep=True))
|
||||
|
||||
# Command for #2, post A.2
|
||||
flash_text.append(SymbolAtAddress('_sym1_end'))
|
||||
|
||||
# Command for #3 D
|
||||
iram0_text.append(InputSectionDesc(CROUTINE, set(['.text.prvCheckPendingReadyList', '.literal.prvCheckPendingReadyList']), []))
|
||||
|
||||
self.compare_rules(expected, actual)
|
||||
|
||||
def test_flags_intermediate_exclusion_command_lib(self):
|
||||
# Test that intermediate exclusion commands from lib-level commands
|
||||
# are included in the flags.
|
||||
#
|
||||
# flash_text
|
||||
# *(EXCLUDE_FILE(libfreertos.a).text ...)
|
||||
# _sym1_start A.1
|
||||
# KEEP(libfreertos.a(EXCLUDE_FILE(libfreertos:croutine).text.* ...)) B
|
||||
# KEEP(libfreertos.a:croutine(...))) C
|
||||
# _sym1_end A.2
|
||||
#
|
||||
# iram0_text
|
||||
# *(.iram .iram.*)
|
||||
# libfreertos.a:croutine(.text.prvCheckPendingReadyList ...) D
|
||||
mapping = u"""
|
||||
[mapping:test]
|
||||
archive: libfreertos.a
|
||||
entries:
|
||||
# 1
|
||||
* (default);
|
||||
text->flash_text emit(sym1) keep #2
|
||||
croutine:prvCheckPendingReadyList (noflash_text) #3
|
||||
"""
|
||||
|
||||
self.add_fragments(mapping)
|
||||
|
||||
actual = self.generation.generate(self.entities)
|
||||
expected = self.generate_default_rules()
|
||||
|
||||
flash_text = expected['flash_text']
|
||||
iram0_text = expected['iram0_text']
|
||||
|
||||
# Command for #2, pre A.1
|
||||
flash_text.append(SymbolAtAddress('_sym1_start'))
|
||||
flash_text[0].exclusions.add(FREERTOS)
|
||||
|
||||
# Command for #1 with keep B
|
||||
# and exclusion for #3
|
||||
flash_text.append(InputSectionDesc(FREERTOS, flash_text[0].sections, [CROUTINE], keep=True))
|
||||
|
||||
# Implicit exclusion command for #3 C
|
||||
croutine_sections = self.entities.get_sections('libfreertos.a', 'croutine')
|
||||
filtered_sections = fnmatch.filter(croutine_sections, '.literal.*')
|
||||
filtered_sections.extend(fnmatch.filter(croutine_sections, '.text.*'))
|
||||
|
||||
filtered_sections = [s for s in filtered_sections if not s.endswith('prvCheckPendingReadyList')]
|
||||
filtered_sections.append('.text')
|
||||
flash_text.append(InputSectionDesc(CROUTINE, set(filtered_sections), [], keep=True))
|
||||
|
||||
# Command for #2, post A.2
|
||||
flash_text.append(SymbolAtAddress('_sym1_end'))
|
||||
|
||||
# Command for #3 C
|
||||
iram0_text.append(InputSectionDesc(CROUTINE, set(['.text.prvCheckPendingReadyList', '.literal.prvCheckPendingReadyList']), []))
|
||||
|
||||
self.compare_rules(expected, actual)
|
||||
|
||||
def test_flags_intermediate_exclusion_command_obj(self):
    """Check that flags attached to an obj-level entry are carried over
    to the implicitly generated intermediate exclusion commands.

    Test that intermediate exclusion commands from obj-level commands
    are included in the flags.  Expected linker script output:

    flash_text
        *(EXCLUDE_FILE(libfreertos.a).text ...)
        _sym1_start                                                 A.1
        KEEP(libfreertos.a:croutine(...)))                          B
        _sym1_end                                                   A.2

    iram0_text
        *(.iram .iram.*)
        libfreertos.a:croutine(.text.prvCheckPendingReadyList ...)  C
    """
    # NOTE(review): indentation of this fragment string was reconstructed
    # from the ldgen fragment grammar convention -- confirm against upstream.
    mapping = u"""
[mapping:test]
archive: libfreertos.a
entries:
    # 1
    croutine (default);
        text->flash_text emit(sym1) keep #2
    croutine:prvCheckPendingReadyList (noflash_text) #3
"""

    self.add_fragments(mapping)

    actual = self.generation.generate(self.entities)
    expected = self.generate_default_rules()

    flash_text = expected['flash_text']
    iram0_text = expected['iram0_text']

    # Command for #2, pre: emit(sym1) brackets the placement with a
    # start symbol, and croutine is excluded from the catch-all.    A.1
    flash_text.append(SymbolAtAddress('_sym1_start'))
    flash_text[0].exclusions.add(CROUTINE)

    # Implicit exclusion command for #3: croutine's text sections,
    # minus the one function moved to noflash_text.  The keep flag
    # from entry #2 must be retained on this command.               B
    croutine_sections = self.entities.get_sections('libfreertos.a', 'croutine')
    filtered_sections = fnmatch.filter(croutine_sections, '.literal.*')
    filtered_sections.extend(fnmatch.filter(croutine_sections, '.text.*'))

    filtered_sections = [s for s in filtered_sections if not s.endswith('prvCheckPendingReadyList')]
    filtered_sections.append('.text')
    flash_text.append(InputSectionDesc(CROUTINE, set(filtered_sections), [], keep=True))

    # Command for #2, post: closing symbol for emit(sym1).          A.2
    flash_text.append(SymbolAtAddress('_sym1_end'))

    # Command for #3: the redirected function lands in iram0_text.  C
    iram0_text.append(InputSectionDesc(CROUTINE, set(['.text.prvCheckPendingReadyList', '.literal.prvCheckPendingReadyList']), []))

    self.compare_rules(expected, actual)
||||
|
||||
def test_flags_separate_exclusion_command_if_explicit_root(self):
    """Explicitly mapped entities get their own command, separated from
    the flags of the parent (root-level, ``archive: *``) entry.

    Explicit commands are separated from the parent's flags.
    Expected linker script output:

    flash_text
        _sym1_start                                                 A.1
        KEEP(* (EXCLUDE_FILE(libfreertos:croutine).text ...)        B
        _sym1_end                                                   A.2
        KEEP(libfreertos.a:croutine(...)))                          C

    iram0_text
        *(.iram .iram.*)
        libfreertos.a:croutine(.text.prvCheckPendingReadyList ...)  D
    """
    # NOTE(review): indentation of this fragment string was reconstructed
    # from the ldgen fragment grammar convention -- confirm against upstream.
    mapping = u"""
[mapping:default]
archive: *
entries:
    # 1
    * (default);
        text->flash_text emit(sym1) keep #2

[mapping:test]
archive: libfreertos.a
entries:
    croutine (default) #3
    croutine:prvCheckPendingReadyList (noflash_text) #4
"""

    # Drop the built-in default mapping so the [mapping:default]
    # defined above is the only root-level mapping in effect.
    self.generation.mappings = {}
    self.add_fragments(mapping)

    actual = self.generation.generate(self.entities)
    expected = self.generate_default_rules()

    flash_text = expected['flash_text']
    iram0_text = expected['iram0_text']

    # Command for #2, pre: opening symbol for emit(sym1).           A.1
    flash_text.insert(0, SymbolAtAddress('_sym1_start'))

    # Command for #1 with keep, and exclusion for the explicitly
    # mapped croutine object (#3).                                  B
    flash_text[1].keep = True
    flash_text[1].exclusions.add(CROUTINE)

    # Command for #2, post: closing symbol for emit(sym1).          A.2
    flash_text.append(SymbolAtAddress('_sym1_end'))

    # Command for #3: croutine is mapped explicitly, so it becomes a
    # separate command that does NOT inherit the root entry's flags. C
    croutine_sections = self.entities.get_sections('libfreertos.a', 'croutine')
    filtered_sections = fnmatch.filter(croutine_sections, '.literal.*')
    filtered_sections.extend(fnmatch.filter(croutine_sections, '.text.*'))

    filtered_sections = [s for s in filtered_sections if not s.endswith('prvCheckPendingReadyList')]
    filtered_sections.append('.text')
    flash_text.append(InputSectionDesc(CROUTINE, set(filtered_sections), []))

    # Command for #4: the redirected function lands in iram0_text.  D
    iram0_text.append(InputSectionDesc(CROUTINE, set(['.text.prvCheckPendingReadyList', '.literal.prvCheckPendingReadyList']), []))

    self.compare_rules(expected, actual)
||||
|
||||
def test_flags_separate_exclusion_command_if_explicit_lib(self):
    """Explicitly mapped entities get their own command, separated from
    the flags of the parent (archive-level) entry.

    Explicit commands are separated from the parent's flags.
    Expected linker script output:

    flash_text
        *(EXCLUDE_FILE(libfreertos.a).text ...)
        _sym1_start                                                 A.1
        KEEP(libfreertos.a(EXCLUDE_FILE(libfreertos:croutine).text.* ...))  B
        _sym1_end                                                   A.2
        KEEP(libfreertos.a:croutine(...)))                          C

    iram0_text
        *(.iram .iram.*)
        libfreertos.a:croutine(.text.prvCheckPendingReadyList ...)  D
    """
    # NOTE(review): indentation of this fragment string was reconstructed
    # from the ldgen fragment grammar convention -- confirm against upstream.
    mapping = u"""
[mapping:test]
archive: libfreertos.a
entries:
    # 1
    * (default);
        text->flash_text emit(sym1) keep
    croutine (default) #2
    croutine:prvCheckPendingReadyList (noflash_text) #3
"""

    self.add_fragments(mapping)

    actual = self.generation.generate(self.entities)
    expected = self.generate_default_rules()

    flash_text = expected['flash_text']
    iram0_text = expected['iram0_text']

    # Command for #1, pre: opening symbol for emit(sym1), and the
    # archive is carved out of the catch-all command.               A.1
    flash_text.append(SymbolAtAddress('_sym1_start'))
    flash_text[0].exclusions.add(FREERTOS)

    # Command for #1 with keep, and exclusion for the explicitly
    # mapped croutine object (#2).                                  B
    flash_text.append(InputSectionDesc(FREERTOS, flash_text[0].sections, [CROUTINE], keep=True))

    # Command for #1, post: closing symbol for emit(sym1).          A.2
    flash_text.append(SymbolAtAddress('_sym1_end'))

    # Command for #2: croutine is mapped explicitly, so it becomes a
    # separate command without the archive entry's flags.           C
    # NOTE(review): the sketch above shows KEEP() on line C, but the
    # expected command built here carries no keep flag -- confirm
    # which one reflects the intended behavior.
    croutine_sections = self.entities.get_sections('libfreertos.a', 'croutine')
    filtered_sections = fnmatch.filter(croutine_sections, '.literal.*')
    filtered_sections.extend(fnmatch.filter(croutine_sections, '.text.*'))

    filtered_sections = [s for s in filtered_sections if not s.endswith('prvCheckPendingReadyList')]
    filtered_sections.append('.text')
    flash_text.append(InputSectionDesc(CROUTINE, set(filtered_sections), []))

    # Command for #3: the redirected function lands in iram0_text.  D
    iram0_text.append(InputSectionDesc(CROUTINE, set(['.text.prvCheckPendingReadyList', '.literal.prvCheckPendingReadyList']), []))

    self.compare_rules(expected, actual)
||||
|
||||
def test_flag_additions(self):
    """A mapping fragment may attach flags to a default-scheme entry,
    provided no other fragment attaches flags to the same entity."""
    mapping = u"""
[mapping:default_add_flag]
archive: *
entries:
    * (default);
        text->flash_text keep
"""

    self.add_fragments(mapping)

    actual = self.generation.generate(self.entities)
    expected = self.generate_default_rules()

    # The only expected effect is KEEP on the catch-all flash_text command.
    expected['flash_text'][0].keep = True

    self.compare_rules(expected, actual)
||||
|
||||
def test_flags_flag_additions_duplicate(self):
    """Identical flags attached to the same entity by two different
    fragments are tolerated and de-duplicated, not treated as a conflict."""
    mapping = u"""
[mapping:default_add_flag_1]
archive: *
entries:
    * (default);
        text->flash_text keep

[mapping:default_add_flag_2]
archive: *
entries:
    * (default);
        text->flash_text keep
"""

    self.add_fragments(mapping)

    actual = self.generation.generate(self.entities)
    expected = self.generate_default_rules()

    # Both fragments request the same thing; a single KEEP results.
    expected['flash_text'][0].keep = True

    self.compare_rules(expected, actual)
||||
|
||||
def test_flags_flag_additions_conflict(self):
    """Different flags attached to the same entity by multiple mapping
    fragments conflict and must raise a GenerationException."""
    # Test condition where multiple fragments specify flags
    # for the same entity - should generate an exception.
    mapping = u"""
[mapping:default_add_flag_1]
archive: *
entries:
    * (default);
        text->flash_text align(2)

[mapping:default_add_flag_2]
archive: *
entries:
    * (default);
        text->flash_text emit(sym1)
"""
    self.add_fragments(mapping)

    # align(2) vs emit(sym1) on the same entity cannot be merged.
    with self.assertRaises(GenerationException):
        self.generation.generate(self.entities)
||||
|
||||
|
||||
# Allow running this test module directly (python test_generation.py).
if __name__ == '__main__':
    unittest.main()
||||
|
@@ -19,10 +19,10 @@ import sys
|
||||
import unittest
|
||||
|
||||
try:
|
||||
from output_commands import InputSectionDesc
|
||||
from output_commands import AlignAtAddress, InputSectionDesc, SymbolAtAddress
|
||||
except ImportError:
|
||||
sys.path.append('../')
|
||||
from output_commands import InputSectionDesc
|
||||
from output_commands import InputSectionDesc, SymbolAtAddress, AlignAtAddress
|
||||
|
||||
from entity import Entity
|
||||
|
||||
@@ -34,7 +34,7 @@ CROUTINE = Entity('libfreertos.a', 'croutine')
|
||||
|
||||
class InputSectionDescTest(unittest.TestCase):
|
||||
|
||||
def test_output_00(self):
|
||||
def test_catch_all_placement(self):
|
||||
# Test default (catch-all) command
|
||||
expected = '*(.literal .literal.* .text .text.*)'
|
||||
|
||||
@@ -44,7 +44,7 @@ class InputSectionDescTest(unittest.TestCase):
|
||||
desc = InputSectionDesc(Entity(Entity.ALL), SECTIONS)
|
||||
self.assertEqual(expected, str(desc))
|
||||
|
||||
def test_output_01(self):
|
||||
def test_lib_placement(self):
|
||||
# Test library placement command
|
||||
expected = '*libfreertos.a:(.literal .literal.* .text .text.*)'
|
||||
|
||||
@@ -60,7 +60,7 @@ class InputSectionDescTest(unittest.TestCase):
|
||||
desc = InputSectionDesc(Entity('libfreertos.a', Entity.ALL, Entity.ALL), SECTIONS)
|
||||
self.assertEqual(expected, str(desc))
|
||||
|
||||
def test_output_02(self):
|
||||
def test_obj_placement(self):
|
||||
# Test object placement command
|
||||
expected = '*libfreertos.a:croutine.*(.literal .literal.* .text .text.*)'
|
||||
|
||||
@@ -79,7 +79,7 @@ class InputSectionDescTest(unittest.TestCase):
|
||||
desc = InputSectionDesc(Entity('libfreertos.a', 'croutine.c'), SECTIONS)
|
||||
self.assertEqual(expected, str(desc))
|
||||
|
||||
def test_output_03(self):
|
||||
def test_invalid_entity(self):
|
||||
# Invalid entity specification
|
||||
with self.assertRaises(AssertionError):
|
||||
InputSectionDesc(Entity('libfreertos.a', 'croutine', 'prvCheckPendingReadyList'), SECTIONS)
|
||||
@@ -90,7 +90,7 @@ class InputSectionDescTest(unittest.TestCase):
|
||||
with self.assertRaises(AssertionError):
|
||||
InputSectionDesc(Entity('libfreertos.a', 'croutine'), SECTIONS, [Entity('libfreertos.a', 'croutine', 'prvCheckPendingReadyList')])
|
||||
|
||||
def test_output_04(self):
|
||||
def test_exclusions(self):
|
||||
# Test exclusions
|
||||
|
||||
# Library
|
||||
@@ -129,13 +129,114 @@ class InputSectionDescTest(unittest.TestCase):
|
||||
desc = InputSectionDesc(CROUTINE, SECTIONS, [Entity('libfreertos.a', 'croutine.c')])
|
||||
self.assertEqual(expected, str(desc))
|
||||
|
||||
def test_output_05(self):
|
||||
def test_empty_sections(self):
    """An empty section list renders a description with a blank
    section group."""
    desc = InputSectionDesc(Entity('libfreertos.a', 'croutine'), [])
    self.assertEqual('*libfreertos.a:croutine.*( )', str(desc))
|
||||
|
||||
def test_keep(self):
    """keep=True wraps the whole input-section description in KEEP()."""
    desc = InputSectionDesc(Entity('libfreertos.a', 'croutine'), [], keep=True)
    self.assertEqual('KEEP(*libfreertos.a:croutine.*( ))', str(desc))
|
||||
|
||||
def test_sort(self):
    """Exercise SORT/SORT_BY_* emission for every supported `sort` tuple.

    A bare (None, None) sort renders as plain SORT; a single keyword
    renders as the corresponding SORT_BY_*; a pair renders as nested
    wrappers, outermost first.
    """
    keyword = {
        None: 'SORT',
        'name': 'SORT_BY_NAME',
        'alignment': 'SORT_BY_ALIGNMENT',
        'init_priority': 'SORT_BY_INIT_PRIORITY',
    }
    exclude = 'EXCLUDE_FILE(*libfreertos.a:croutine.c.*) '
    cases = [
        (None, None),
        ('name', None),
        ('alignment', None),
        ('init_priority', None),
        ('name', 'alignment'),
        ('name', 'name'),
        ('alignment', 'name'),
        ('alignment', 'alignment'),
    ]

    for outer, inner in cases:
        rendered = []
        for section in ('.literal', '.literal.*', '.text', '.text.*'):
            body = exclude + section
            if inner is not None:
                body = '%s(%s)' % (keyword[inner], body)
            rendered.append('%s(%s)' % (keyword[outer], body))
        expected = '*libfreertos.a:croutine.*(%s)' % ' '.join(rendered)

        desc = InputSectionDesc(CROUTINE, SECTIONS,
                                [Entity('libfreertos.a', 'croutine.c')],
                                keep=False, sort=(outer, inner))
        self.assertEqual(expected, str(desc))
|
||||
|
||||
|
||||
class SymbolAtAddressTest(unittest.TestCase):
    """Tests for the SymbolAtAddress output command."""

    def test_symbol(self):
        # SymbolAtAddress pins a named symbol to the current location
        # counter via an ABSOLUTE(.) assignment.
        name = 'test_symbol'
        self.assertEqual('%s = ABSOLUTE(.);' % name, str(SymbolAtAddress(name)))
||||
|
||||
|
||||
class AlignAtAddressTest(unittest.TestCase):
    """Tests for the AlignAtAddress output command."""

    def test_align(self):
        """AlignAtAddress advances the location counter to the given
        alignment boundary."""
        align = 8
        # Build the expectation from `align` itself.  The original
        # hard-coded the literal 8 ('%d' % 8), so the assertion would
        # keep passing even if `align` were changed above.
        expected = '. = ALIGN(%d);' % align

        desc = AlignAtAddress(align)
        self.assertEqual(expected, str(desc))
|
||||
|
||||
|
||||
# Allow running this test module directly (python test_output_commands.py).
if __name__ == '__main__':
    unittest.main()
|
||||
|
Reference in New Issue
Block a user