Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit 3ba92d8f authored by Martin Geisler
Browse files

pdl: rename “grammar” to “file” throughout

The word “grammar” describes the rules which determine how to build
the AST. So the top-level AST node is not a “grammar”. Here I chose to
call it a “file”.

Bug: 239150180
Test: m pdl
Change-Id: I4a842e0d2055c4c5e42072b133f0afc3765c5b5c
parent 78b38488
Loading
Loading
Loading
Loading
+3 −3
Original line number Original line Diff line number Diff line
@@ -76,8 +76,8 @@ rust_test_host {
    ],
    ],
}
}


// Generate the python parser+serializer backend for the
// Generate the python parser+serializer backend for the test file
// test grammar located at tests/grammars/test-grammar.pdl.
// located at tests/examples/complex.pdl.
genrule {
genrule {
    name: "pdl_python_generator_test_gen",
    name: "pdl_python_generator_test_gen",
    cmd: "$(location :pdl) $(in) |" +
    cmd: "$(location :pdl) $(in) |" +
@@ -91,7 +91,7 @@ genrule {
        "tests/custom_types.py",
        "tests/custom_types.py",
    ],
    ],
    srcs: [
    srcs: [
        "tests/grammars/test-grammar.pdl",
        "tests/examples/complex.pdl",
    ],
    ],
    out: [
    out: [
        "pdl_test.py",
        "pdl_test.py",
+6 −6
Original line number Original line Diff line number Diff line
@@ -477,7 +477,7 @@ def generate_packet_parser(packet: ast.Declaration) -> List[str]:
    """Generate the parse() function for a toplevel Packet or Struct
    """Generate the parse() function for a toplevel Packet or Struct
       declaration."""
       declaration."""


    parser = FieldParser(byteorder=packet.grammar.byteorder)
    parser = FieldParser(byteorder=packet.file.byteorder)
    for f in packet.fields:
    for f in packet.fields:
        parser.parse(f)
        parser.parse(f)
    parser.done()
    parser.done()
@@ -513,7 +513,7 @@ def generate_derived_packet_parser(packet: ast.Declaration) -> List[str]:
    """Generate the parse() function for a derived Packet or Struct
    """Generate the parse() function for a derived Packet or Struct
       declaration."""
       declaration."""
    print(f"Parsing packet {packet.id}", file=sys.stderr)
    print(f"Parsing packet {packet.id}", file=sys.stderr)
    parser = FieldParser(byteorder=packet.grammar.byteorder)
    parser = FieldParser(byteorder=packet.file.byteorder)
    for f in packet.fields:
    for f in packet.fields:
        parser.parse(f)
        parser.parse(f)
    parser.done()
    parser.done()
@@ -617,13 +617,13 @@ def generate_checksum_declaration_check(decl: ast.ChecksumDeclaration) -> str:


def run(input: argparse.FileType, output: argparse.FileType, custom_type_location: Optional[str]):
def run(input: argparse.FileType, output: argparse.FileType, custom_type_location: Optional[str]):
    #    with open(input) as pdl_json:
    #    with open(input) as pdl_json:
    grammar = ast.Grammar.from_json(json.load(input))
    file = ast.File.from_json(json.load(input))


    core.desugar(grammar)
    core.desugar(file)


    custom_types = []
    custom_types = []
    custom_type_checks = ""
    custom_type_checks = ""
    for d in grammar.declarations:
    for d in file.declarations:
        if isinstance(d, ast.CustomFieldDeclaration):
        if isinstance(d, ast.CustomFieldDeclaration):
            custom_types.append(d.id)
            custom_types.append(d.id)
            custom_type_checks += generate_custom_field_declaration_check(d)
            custom_type_checks += generate_custom_field_declaration_check(d)
@@ -639,7 +639,7 @@ def run(input: argparse.FileType, output: argparse.FileType, custom_type_locatio
    output.write(generate_prelude())
    output.write(generate_prelude())
    output.write(custom_type_checks)
    output.write(custom_type_checks)


    for d in grammar.declarations:
    for d in file.declarations:
        if isinstance(d, ast.EnumDeclaration):
        if isinstance(d, ast.EnumDeclaration):
            output.write(generate_enum_declaration(d))
            output.write(generate_enum_declaration(d))
        elif isinstance(d, (ast.PacketDeclaration, ast.StructDeclaration)):
        elif isinstance(d, (ast.PacketDeclaration, ast.StructDeclaration)):
+11 −11
Original line number Original line Diff line number Diff line
@@ -94,7 +94,7 @@ class FixedField(Field):


    @property
    @property
    def type(self) -> Optional['Declaration']:
    def type(self) -> Optional['Declaration']:
        return self.parent.grammar.typedef_scope[self.enum_id] if self.enum_id else None
        return self.parent.file.typedef_scope[self.enum_id] if self.enum_id else None




@node('reserved_field')
@node('reserved_field')
@@ -112,7 +112,7 @@ class ArrayField(Field):


    @property
    @property
    def type(self) -> Optional['Declaration']:
    def type(self) -> Optional['Declaration']:
        return self.parent.grammar.typedef_scope[self.type_id] if self.type_id else None
        return self.parent.file.typedef_scope[self.type_id] if self.type_id else None




@node('scalar_field')
@node('scalar_field')
@@ -128,7 +128,7 @@ class TypedefField(Field):


    @property
    @property
    def type(self) -> 'Declaration':
    def type(self) -> 'Declaration':
        return self.parent.grammar.typedef_scope[self.type_id]
        return self.parent.file.typedef_scope[self.type_id]




@node('group_field')
@node('group_field')
@@ -139,7 +139,7 @@ class GroupField(Field):


@dataclass
@dataclass
class Declaration(Node):
class Declaration(Node):
    grammar: 'Grammar' = field(init=False)
    file: 'File' = field(init=False)


    def __post_init__(self):
    def __post_init__(self):
        if hasattr(self, 'fields'):
        if hasattr(self, 'fields'):
@@ -182,7 +182,7 @@ class PacketDeclaration(Declaration):


    @property
    @property
    def parent(self) -> Optional['PacketDeclaration']:
    def parent(self) -> Optional['PacketDeclaration']:
        return self.grammar.packet_scope[self.parent_id] if self.parent_id else None
        return self.file.packet_scope[self.parent_id] if self.parent_id else None




@node('struct_declaration')
@node('struct_declaration')
@@ -194,7 +194,7 @@ class StructDeclaration(Declaration):


    @property
    @property
    def parent(self) -> Optional['StructDeclaration']:
    def parent(self) -> Optional['StructDeclaration']:
        return self.grammar.typedef_scope[self.parent_id] if self.parent_id else None
        return self.file.typedef_scope[self.parent_id] if self.parent_id else None




@node('group_declaration')
@node('group_declaration')
@@ -204,7 +204,7 @@ class GroupDeclaration(Declaration):




@dataclass
@dataclass
class Grammar:
class File:
    endianness: EndiannessDeclaration
    endianness: EndiannessDeclaration
    declarations: List[Declaration]
    declarations: List[Declaration]
    packet_scope: Dict[str, Declaration] = field(init=False)
    packet_scope: Dict[str, Declaration] = field(init=False)
@@ -218,7 +218,7 @@ class Grammar:


        # Construct the toplevel declaration scopes.
        # Construct the toplevel declaration scopes.
        for d in self.declarations:
        for d in self.declarations:
            d.grammar = self
            d.file = self
            if isinstance(d, PacketDeclaration):
            if isinstance(d, PacketDeclaration):
                self.packet_scope[d.id] = d
                self.packet_scope[d.id] = d
            elif isinstance(d, GroupDeclaration):
            elif isinstance(d, GroupDeclaration):
@@ -227,11 +227,11 @@ class Grammar:
                self.typedef_scope[d.id] = d
                self.typedef_scope[d.id] = d


    @staticmethod
    @staticmethod
    def from_json(obj: object) -> 'Grammar':
    def from_json(obj: object) -> 'File':
        """Import a Grammar exported as JSON object by the PDL parser."""
        """Import a File exported as JSON object by the PDL parser."""
        endianness = convert_(obj['endianness'])
        endianness = convert_(obj['endianness'])
        declarations = convert_(obj['declarations'])
        declarations = convert_(obj['declarations'])
        return Grammar(endianness, declarations)
        return File(endianness, declarations)


    @property
    @property
    def byteorder(self) -> str:
    def byteorder(self) -> str:
+10 −10
Original line number Original line Diff line number Diff line
@@ -16,7 +16,7 @@ def desugar_field_(field: Field, constraints: Dict[str, Constraint]) -> List[Fie
        return [FixedField(kind='fixed_field', loc=field.loc, enum_id=field.type_id, tag_id=tag_id)]
        return [FixedField(kind='fixed_field', loc=field.loc, enum_id=field.type_id, tag_id=tag_id)]


    elif isinstance(field, GroupField):
    elif isinstance(field, GroupField):
        group = field.parent.grammar.group_scope[field.group_id]
        group = field.parent.file.group_scope[field.group_id]
        constraints = dict([(c.id, c) for c in field.constraints])
        constraints = dict([(c.id, c) for c in field.constraints])
        fields = []
        fields = []
        for f in group.fields:
        for f in group.fields:
@@ -27,14 +27,14 @@ def desugar_field_(field: Field, constraints: Dict[str, Constraint]) -> List[Fie
        return [field]
        return [field]




def desugar(grammar: Grammar):
def desugar(file: File):
    """Inline group fields.
    """Inline group fields.
    Constrained fields are transformed into fixed fields.
    Constrained fields are transformed into fixed fields.
    Group declarations are removed from the grammar object.
    Group declarations are removed from the file object.
    **The original grammar object is modified inline.**"""
    **The original file object is modified inline.**"""


    declarations = []
    declarations = []
    for d in grammar.declarations:
    for d in file.declarations:
        if isinstance(d, GroupDeclaration):
        if isinstance(d, GroupDeclaration):
            continue
            continue


@@ -46,8 +46,8 @@ def desugar(grammar: Grammar):


        declarations.append(d)
        declarations.append(d)


    grammar.declarations = declarations
    file.declarations = declarations
    grammar.group_scope = {}
    file.group_scope = {}




def get_packet_field(packet: Union[PacketDeclaration, StructDeclaration], id: str) -> Optional[Field]:
def get_packet_field(packet: Union[PacketDeclaration, StructDeclaration], id: str) -> Optional[Field]:
@@ -57,10 +57,10 @@ def get_packet_field(packet: Union[PacketDeclaration, StructDeclaration], id: st
        if getattr(f, 'id', None) == id:
        if getattr(f, 'id', None) == id:
            return f
            return f
    if isinstance(packet, PacketDeclaration) and packet.parent_id:
    if isinstance(packet, PacketDeclaration) and packet.parent_id:
        parent = packet.grammar.packet_scope[packet.parent_id]
        parent = packet.file.packet_scope[packet.parent_id]
        return get_packet_field(parent, id)
        return get_packet_field(parent, id)
    elif isinstance(packet, StructDeclaration) and packet.parent_id:
    elif isinstance(packet, StructDeclaration) and packet.parent_id:
        parent = packet.grammar.typedef_scope[packet.parent_id]
        parent = packet.file.typedef_scope[packet.parent_id]
        return get_packet_field(parent, id)
        return get_packet_field(parent, id)
    else:
    else:
        return None
        return None
@@ -74,7 +74,7 @@ def get_derived_packets(decl: Union[PacketDeclaration, StructDeclaration]
    are traversed."""
    are traversed."""


    children = []
    children = []
    for d in decl.grammar.declarations:
    for d in decl.file.declarations:
        if type(d) is type(decl) and d.parent_id == decl.id:
        if type(d) is type(decl) and d.parent_id == decl.id:
            if (len(d.fields) == 1 and isinstance(d.fields[0], (PayloadField, BodyField))):
            if (len(d.fields) == 1 and isinstance(d.fields[0], (PayloadField, BodyField))):
                children.extend([(d.constraints + sub_constraints, sub_child)
                children.extend([(d.constraints + sub_constraints, sub_child)
+4 −4
Original line number Original line Diff line number Diff line
@@ -148,7 +148,7 @@ pub enum Decl {
}
}


#[derive(Debug, Serialize)]
#[derive(Debug, Serialize)]
pub struct Grammar {
pub struct File {
    pub version: String,
    pub version: String,
    pub file: FileId,
    pub file: FileId,
    pub comments: Vec<Comment>,
    pub comments: Vec<Comment>,
@@ -210,9 +210,9 @@ impl ops::Add<SourceRange> for SourceRange {
    }
    }
}
}


impl Grammar {
impl File {
    pub fn new(file: FileId) -> Grammar {
    pub fn new(file: FileId) -> File {
        Grammar {
        File {
            version: "1,0".to_owned(),
            version: "1,0".to_owned(),
            comments: vec![],
            comments: vec![],
            // The endianness is mandatory, so this default value will
            // The endianness is mandatory, so this default value will
Loading