Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit 09ea5fbc authored by Treehugger Robot, committed by Automerger Merge Worker
Browse files

Merge "pdl: Implement python parser generator for pdl source files" am: 31c960af

parents 26f660b2 31c960af
Loading
Loading
Loading
Loading
+37 −0
Original line number Diff line number Diff line
@@ -70,3 +70,40 @@ rust_test_host {
        ":rustfmt",
    ],
}

// Generate the python parser+serializer backend for the
// test grammar located at test/grammars/test-grammar.pdl.
genrule {
    name: "pdl_python_generator_test_gen",
    // Run the pdl compiler on the input grammar and pipe its output
    // into the python backend generator script.
    cmd: "$(location :pdl) $(in) |" +
        " $(location scripts/generate_python_backend.py)" +
        " --output $(out) --custom-type-location test.custom_types",
    tools: [ ":pdl" ],
    // Files the generation step depends on at build time.
    // NOTE(review): only generate_python_backend.py is referenced by cmd;
    // the others are presumably imported by it — confirm.
    tool_files: [
        "scripts/generate_python_backend.py",
        "scripts/pdl/core.py",
        "scripts/pdl/ast.py",
        "test/custom_types.py",
    ],
    srcs: [
        "test/grammars/test-grammar.pdl",
    ],
    // Single generated python module.
    out: [
        "pdl_test.py",
    ],
}

// Test the generated python parser+serializer against
// pre-generated binary inputs.
python_test_host {
    name: "pdl_python_generator_test",
    main: "test/python_generator_test.py",
    srcs: [
        "test/python_generator_test.py",
        "test/custom_types.py",
        // Output of the pdl_python_generator_test_gen genrule (pdl_test.py).
        ":pdl_python_generator_test_gen",
    ],
    test_options: {
        // Mark this target as a unit test.
        unit_test: true,
    },
}
+660 −0

File added.

Preview size limit exceeded, changes collapsed.

+258 −0
Original line number Diff line number Diff line
from dataclasses import dataclass, field
from typing import Optional, List, Dict

# Registry mapping a node kind string to the dataclass that represents it.
constructors_ = {}


def node(kind: str):
    """Class decorator: turn the class into a dataclass and register it
    under *kind* so it can be instantiated from a parsed JSON node."""

    def decorator(cls):
        wrapped = dataclass(cls)
        constructors_[kind] = wrapped
        return wrapped

    return decorator


@dataclass
class SourceLocation:
    """A single position in a source file: byte offset, line, and column."""
    offset: int
    line: int
    column: int


@dataclass
class SourceRange:
    """A start..end span within a source file.
    `file` is the parser's numeric file identifier."""
    file: int
    start: SourceLocation
    end: SourceLocation


@dataclass
class Node:
    """Base class for all AST nodes decoded from the PDL parser's JSON."""
    kind: str
    # convert_ builds a SourceRange (start/end SourceLocations) for this
    # attribute; the annotation reflects the value actually stored.
    loc: SourceRange


@node('tag')
class Tag(Node):
    """Enum tag: symbolic identifier paired with its integer value."""
    id: str
    value: int


@node('constraint')
class Constraint(Node):
    """Fixes the field named `id` to either an integer `value` or an enum
    tag `tag_id` (exactly one of the two is expected to be set)."""
    id: str
    value: Optional[int]
    tag_id: Optional[str]


@dataclass
class Field(Node):
    """Base class for fields; `parent` is the enclosing declaration,
    assigned by Declaration.__post_init__."""
    parent: Node = field(init=False)


@node('checksum_field')
class ChecksumField(Field):
    """References the checksum field named `field_id`.
    Contributes 0 bits to the packet size (see get_field_size)."""
    field_id: str


@node('padding_field')
class PaddingField(Field):
    """Padding; `width` is expressed in bytes (get_field_size multiplies
    it by 8 to obtain bits)."""
    width: int


@node('size_field')
class SizeField(Field):
    """Size of the field named `field_id`, encoded on `width` bits."""
    field_id: str
    width: int


@node('count_field')
class CountField(Field):
    """Element count of the array field named `field_id`, encoded on
    `width` bits."""
    field_id: str
    width: int


@node('body_field')
class BodyField(Field):
    """Placeholder for a derived packet's body; fixed id '_body_'."""
    id: str = field(init=False, default='_body_')


@node('payload_field')
class PayloadField(Field):
    """Payload placeholder with an optional size modifier;
    fixed id '_payload_'."""
    size_modifier: Optional[str]
    id: str = field(init=False, default='_payload_')


@node('fixed_field')
class FixedField(Field):
    """Fixed-value field: either an explicit (width, value) pair or an
    enum tag reference (enum_id, tag_id)."""
    width: Optional[int] = None
    value: Optional[int] = None
    enum_id: Optional[str] = None
    tag_id: Optional[str] = None

    @property
    def type(self) -> Optional['Declaration']:
        # Resolve the referenced enum declaration, if any.
        return self.parent.grammar.typedef_scope[self.enum_id] if self.enum_id else None


@node('reserved_field')
class ReservedField(Field):
    """Reserved (unnamed) bits of the given width."""
    width: int


@node('array_field')
class ArrayField(Field):
    """Array field. Elements are either `width`-bit scalars or values of
    the declaration named `type_id`; `size` is the static element count
    when known, otherwise the array is dynamically sized."""
    id: str
    width: Optional[int]
    type_id: Optional[str]
    size_modifier: Optional[str]
    size: Optional[int]

    @property
    def type(self) -> Optional['Declaration']:
        # Resolve the element type declaration, if any.
        return self.parent.grammar.typedef_scope[self.type_id] if self.type_id else None


@node('scalar_field')
class ScalarField(Field):
    """Integer field `id` of `width` bits."""
    id: str
    width: int


@node('typedef_field')
class TypedefField(Field):
    """Field whose type is a toplevel declaration named `type_id`."""
    id: str
    type_id: str

    @property
    def type(self) -> 'Declaration':
        # Resolve the declaration named by type_id.
        return self.parent.grammar.typedef_scope[self.type_id]


@node('group_field')
class GroupField(Field):
    """Inlines the group named `group_id`, applying `constraints` to its
    members (expanded by desugar_field_ in core.py)."""
    group_id: str
    constraints: List[Constraint]


@dataclass
class Declaration(Node):
    """Base class for toplevel declarations. The back-reference `grammar`
    is assigned by Grammar.__post_init__."""
    grammar: 'Grammar' = field(init=False)

    def __post_init__(self):
        # Wire each contained field back to its enclosing declaration,
        # when the concrete subclass actually declares fields.
        if hasattr(self, 'fields'):
            for f in self.fields:
                f.parent = self


@node('endianness_declaration')
class EndiannessDeclaration(Node):
    """Grammar endianness; `value` is e.g. 'little_endian'
    (interpreted by Grammar.byteorder)."""
    value: str


@node('checksum_declaration')
class ChecksumDeclaration(Declaration):
    """Declares a `width`-bit checksum computed by the named function."""
    id: str
    function: str
    width: int


@node('custom_field_declaration')
class CustomFieldDeclaration(Declaration):
    """Declares a custom field type backed by the named function;
    `width` is None for dynamically-sized custom types."""
    id: str
    function: str
    width: Optional[int]


@node('enum_declaration')
class EnumDeclaration(Declaration):
    """Enumeration of `width` bits with the given list of tags."""
    id: str
    tags: List[Tag]
    width: int


@node('packet_declaration')
class PacketDeclaration(Declaration):
    """Packet declaration, optionally deriving from a parent packet with
    constraints on the parent's fields."""
    id: str
    parent_id: Optional[str]
    constraints: List[Constraint]
    fields: List[Field]

    @property
    def parent(self) -> Optional['PacketDeclaration']:
        # Parent packets are looked up in the packet scope.
        return self.grammar.packet_scope[self.parent_id] if self.parent_id else None


@node('struct_declaration')
class StructDeclaration(Declaration):
    """Struct declaration, optionally deriving from a parent struct with
    constraints on the parent's fields."""
    id: str
    parent_id: Optional[str]
    constraints: List[Constraint]
    fields: List[Field]

    @property
    def parent(self) -> Optional['StructDeclaration']:
        # Unlike packets, parent structs live in the typedef scope.
        return self.grammar.typedef_scope[self.parent_id] if self.parent_id else None


@node('group_declaration')
class GroupDeclaration(Declaration):
    """Named, reusable set of fields, inlined at use sites by group
    fields (see desugar in core.py)."""
    id: str
    fields: List[Field]


@dataclass
class Grammar:
    """Root of the PDL AST: the endianness declaration plus all toplevel
    declarations, with name-indexed scopes built at construction time."""
    endianness: EndiannessDeclaration
    declarations: List[Declaration]
    # Name -> declaration maps, populated in __post_init__.
    packet_scope: Dict[str, Declaration] = field(init=False)
    typedef_scope: Dict[str, Declaration] = field(init=False)
    group_scope: Dict[str, Declaration] = field(init=False)

    def __post_init__(self):
        self.packet_scope = dict()
        self.typedef_scope = dict()
        self.group_scope = dict()

        # Construct the toplevel declaration scopes.
        for d in self.declarations:
            # Give every declaration a back-reference to its grammar.
            d.grammar = self
            if isinstance(d, PacketDeclaration):
                self.packet_scope[d.id] = d
            elif isinstance(d, GroupDeclaration):
                self.group_scope[d.id] = d
            else:
                # Enums, structs, checksums, and custom fields all share
                # the typedef scope.
                self.typedef_scope[d.id] = d

    @staticmethod
    def from_json(obj: object) -> 'Grammar':
        """Import a Grammar exported as JSON object by the PDL parser."""
        endianness = convert_(obj['endianness'])
        declarations = convert_(obj['declarations'])
        return Grammar(endianness, declarations)

    @property
    def byteorder(self) -> str:
        """Byte order string usable with int.to_bytes / int.from_bytes."""
        return 'little' if self.endianness.value == 'little_endian' else 'big'


def convert_(obj: object) -> object:
    """Recursively convert a JSON object produced by the PDL parser into
    the corresponding AST value.

    None, ints, and strings are returned unchanged; lists are converted
    element-wise; dicts are instantiated through the constructor
    registered in constructors_ for their 'kind'.

    Raises:
        Exception: for an unregistered node kind or an unhandled
            object type.
    """
    if obj is None:
        return None
    if isinstance(obj, (int, str)):
        return obj
    if isinstance(obj, list):
        return [convert_(elt) for elt in obj]
    # BUG FIX: this test was `isinstance(obj, object)`, which is always
    # True — the final raise was unreachable and non-dict values (e.g.
    # floats) crashed with an opaque TypeError on `obj['kind']`.
    if isinstance(obj, dict):
        kind = obj['kind']
        loc = obj['loc']
        loc = SourceRange(loc['file'], SourceLocation(**loc['start']), SourceLocation(**loc['end']))
        constructor = constructors_.get(kind)
        if constructor is None:
            # Previously this crashed with `None(...)`; fail explicitly.
            raise Exception('Unhandled node kind: ' + kind)
        members = {'loc': loc, 'kind': kind}
        for name, value in obj.items():
            if name != 'kind' and name != 'loc':
                members[name] = convert_(value)
        return constructor(**members)
    raise Exception('Unhandled json object type')
+184 −0
Original line number Diff line number Diff line
from typing import Optional, List, Dict, Union, Tuple
from .ast import *


def desugar_field_(field: Field, constraints: Dict[str, Constraint]) -> List[Field]:
    """Inline group and constrained fields.

    A constrained scalar or typedef field becomes an equivalent fixed
    field; a group field is expanded into the group's recursively
    desugared members. Any other field passes through unchanged."""

    if isinstance(field, ScalarField) and field.id in constraints:
        fixed_value = constraints[field.id].value
        return [FixedField(kind='fixed_field', loc=field.loc, width=field.width, value=fixed_value)]

    if isinstance(field, TypedefField) and field.id in constraints:
        fixed_tag = constraints[field.id].tag_id
        return [FixedField(kind='fixed_field', loc=field.loc, enum_id=field.type_id, tag_id=fixed_tag)]

    if isinstance(field, GroupField):
        group = field.parent.grammar.group_scope[field.group_id]
        # Constraints attached to the group field apply to its members.
        member_constraints = {c.id: c for c in field.constraints}
        expanded: List[Field] = []
        for member in group.fields:
            expanded += desugar_field_(member, member_constraints)
        return expanded

    return [field]


def desugar(grammar: Grammar):
    """Expand group fields and drop group declarations.

    Constrained fields are rewritten as fixed fields in the process.
    **The original grammar object is modified inline.**"""

    kept = []
    for decl in grammar.declarations:
        # Group declarations disappear once their uses are inlined.
        if isinstance(decl, GroupDeclaration):
            continue

        if isinstance(decl, (PacketDeclaration, StructDeclaration)):
            expanded: List[Field] = []
            for f in decl.fields:
                expanded += desugar_field_(f, {})
            decl.fields = expanded

        kept.append(decl)

    grammar.declarations = kept
    grammar.group_scope = {}


def get_packet_field(packet: Union[PacketDeclaration, StructDeclaration], id: str) -> Optional[Field]:
    """Return the field with the selected identifier declared in the
    provided packet/struct or one of its ancestors, or None."""
    match = next((f for f in packet.fields if getattr(f, 'id', None) == id), None)
    if match is not None:
        return match
    # Not declared here: recurse into the parent declaration, if any.
    if isinstance(packet, PacketDeclaration) and packet.parent_id:
        return get_packet_field(packet.grammar.packet_scope[packet.parent_id], id)
    if isinstance(packet, StructDeclaration) and packet.parent_id:
        return get_packet_field(packet.grammar.typedef_scope[packet.parent_id], id)
    return None


def get_derived_packets(decl: Union[PacketDeclaration, StructDeclaration]
                       ) -> List[Tuple[List[Constraint], Union[PacketDeclaration, StructDeclaration]]]:
    """Return the packets or structs that immediately derive from *decl*,
    each paired with its field constraints.

    Aliases (declarations whose only field is a payload or body) are
    traversed: their own children are reported instead, with the alias's
    constraints prepended."""

    children = []
    for candidate in decl.grammar.declarations:
        if type(candidate) is not type(decl) or candidate.parent_id != decl.id:
            continue
        is_alias = (len(candidate.fields) == 1 and
                    isinstance(candidate.fields[0], (PayloadField, BodyField)))
        if is_alias:
            for (sub_constraints, sub_child) in get_derived_packets(candidate):
                children.append((candidate.constraints + sub_constraints, sub_child))
        else:
            children.append((candidate.constraints, candidate))
    return children


def get_field_size(field: Field, skip_payload: bool = False) -> Optional[int]:
    """Determine the size of a field in bits, if possible.
    If the field is dynamically sized (e.g. unsized array or payload field),
    None is returned instead. If skip_payload is set, payload and body fields
    are counted as having size 0 rather than a variable size."""

    if isinstance(field, (ScalarField, SizeField, CountField, ReservedField)):
        return field.width

    if isinstance(field, FixedField):
        # An explicit width wins; otherwise use the referenced enum's width.
        return field.width or field.type.width

    if isinstance(field, PaddingField):
        # Padding width is expressed in bytes.
        return 8 * field.width

    if isinstance(field, ArrayField) and field.size is not None:
        element_width = field.width or get_declaration_size(field.type)
        return None if element_width is None else element_width * field.size

    if isinstance(field, TypedefField):
        return get_declaration_size(field.type)

    if isinstance(field, ChecksumField):
        return 0

    if skip_payload and isinstance(field, (PayloadField, BodyField)):
        return 0

    return None


def get_declaration_size(decl: Declaration, skip_payload: bool = False) -> Optional[int]:
    """Determine the size of a declaration type in bits, if possible.
    If the type is dynamically sized (e.g. contains an array or payload),
    None is returned instead. If skip_payload is set, payload and body fields
    are counted as having size 0 rather than a variable size."""

    if isinstance(decl, (EnumDeclaration, CustomFieldDeclaration, ChecksumDeclaration)):
        return decl.width

    elif isinstance(decl, (PacketDeclaration, StructDeclaration)):
        parent = decl.parent
        packet_size = get_declaration_size(parent, skip_payload=True) if parent else 0
        # BUG FIX: a dynamically-sized parent makes this declaration
        # dynamically sized too; previously packet_size stayed None and
        # `packet_size += field_size` raised a TypeError.
        if packet_size is None:
            return None
        for f in decl.fields:
            field_size = get_field_size(f, skip_payload=skip_payload)
            if field_size is None:
                return None
            packet_size += field_size
        return packet_size

    else:
        return None


def get_array_field_size(field: ArrayField) -> Union[None, int, Field]:
    """Return the array's static size, or the size/count field that
    governs it, or None when the array is unsized."""

    if field.size is not None:
        return field.size
    # Look for a sibling size/count field referencing this array.
    return next((f for f in field.parent.fields
                 if isinstance(f, (SizeField, CountField)) and f.field_id == field.id),
                None)


def get_payload_field_size(field: Union[PayloadField, BodyField]) -> Optional[Field]:
    """Return the sibling size field governing this payload or body,
    or None when the payload is unsized."""

    return next((f for f in field.parent.fields
                 if isinstance(f, SizeField) and f.field_id == field.id),
                None)


def get_array_element_size(field: ArrayField) -> Optional[int]:
    """Return the array element size in bits, or None when it is not
    known at compile time."""

    # A (truthy) inline width wins; otherwise derive from the element type.
    if field.width:
        return field.width
    return get_declaration_size(field.type)


def is_bit_field(field: Field) -> bool:
    """Identify fields that can have bit granularity.
    These include: ScalarField, FixedField, ReservedField, SizeField,
    CountField, and TypedefField with an enum type.
    Returns True for such fields, False otherwise."""

    if isinstance(field, (ScalarField, SizeField, CountField, FixedField, ReservedField)):
        return True

    elif isinstance(field, TypedefField) and isinstance(field.type, EnumDeclaration):
        return True

    else:
        return False
+42 −0
Original line number Diff line number Diff line
from dataclasses import dataclass
from typing import Tuple


@dataclass
class SizedCustomField:
    """Test custom field with a known static size of one byte.

    `value` is a real dataclass field (previously the class had a manual
    __init__ and no annotated fields, so the generated __eq__ compared
    zero fields and all instances were equal). parse/parse_all are
    staticmethods, matching their class-level call sites.
    """
    value: int

    @staticmethod
    def parse(span: bytes) -> Tuple['SizedCustomField', bytes]:
        """Parse one byte from span; return (field, remaining bytes)."""
        return (SizedCustomField(span[0]), span[1:])

    @staticmethod
    def parse_all(span: bytes) -> 'SizedCustomField':
        """Parse span, which must be exactly one byte long."""
        assert (len(span) == 1)
        return SizedCustomField(span[0])

    @property
    def size(self) -> int:
        """Serialized size in bytes."""
        return 1


@dataclass
class UnsizedCustomField:
    """Test custom field with no statically declared size.

    `value` is a real dataclass field (previously the class had a manual
    __init__ and no annotated fields, so the generated __eq__ compared
    zero fields and all instances were equal). parse/parse_all are
    staticmethods, matching their class-level call sites.
    """
    value: int

    @staticmethod
    def parse(span: bytes) -> Tuple['UnsizedCustomField', bytes]:
        """Parse one byte from span; return (field, remaining bytes)."""
        return (UnsizedCustomField(span[0]), span[1:])

    @staticmethod
    def parse_all(span: bytes) -> 'UnsizedCustomField':
        """Parse span, which must be exactly one byte long."""
        assert (len(span) == 1)
        return UnsizedCustomField(span[0])

    @property
    def size(self) -> int:
        """Serialized size in bytes."""
        return 1


def Checksum(span: bytes) -> int:
    """Toy checksum: sum of the bytes of span, modulo 256."""
    total = 0
    for byte in span:
        total += byte
    return total & 0xFF
Loading