Skip to content

Commit

Permalink
♻️ codebase passes mypy strict
Browse files Browse the repository at this point in the history
  • Loading branch information
Mattcrmx committed May 31, 2024
1 parent a008704 commit 421b6ff
Show file tree
Hide file tree
Showing 10 changed files with 66 additions and 47 deletions.
5 changes: 5 additions & 0 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -23,3 +23,8 @@ repos:
hooks:
- id: ruff
args: ["--fix"]

- repo: https://github.com/pre-commit/mirrors-mypy
rev: "v1.6.1"
hooks:
- id: mypy
Empty file added py.typed
Empty file.
14 changes: 11 additions & 3 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -24,9 +24,6 @@ dev = [
[tool.setuptools.packages.find]
where = ["src"]

[tool.mypy]
ignore_missing_imports = true

[tool.ruff]
line-length = 100
target-version = "py38"
Expand Down Expand Up @@ -56,3 +53,14 @@ convention = "google"
"tests/*.py" = ["D", "R", "S"]
"src/cyheadgen/_lexer.py" = ["N816", "N802", "D415"]
"src/cyheadgen/_parser.py" = ["N816", "N802", "D415", "D205", "E501"]


[tool.mypy]
files = "src"
python_version = "3.8"
ignore_missing_imports = true
strict = true

[[tool.mypy.overrides]]
module = "ply.*"
follow_imports = "skip"
10 changes: 5 additions & 5 deletions src/cyheadgen/_lexer.py
Original file line number Diff line number Diff line change
Expand Up @@ -55,7 +55,7 @@
t_ignore = " \t\x0c"


def t_NEWLINE(t):
def t_NEWLINE(t: lex.LexToken) -> None:
    r"""\n+"""
    # PLY uses the docstring above as this rule's regex, so it must stay a
    # raw pattern, not prose. The pattern can consume several consecutive
    # newlines; bump the line counter by however many were matched so that
    # later diagnostics report accurate line numbers. Returning None
    # discards the token.
    t.lexer.lineno += t.value.count("\n")

Expand All @@ -81,24 +81,24 @@ def t_NEWLINE(t):
t_CCONST = r"(L)?\'([^\\\n]|(\\.))*?\'"


def t_ID(t):
def t_ID(t: lex.LexToken) -> lex.LexToken:
    r"""[A-Za-z_][\w_]*"""
    # The docstring is the PLY regex for identifiers. If the matched text is
    # a reserved word present in keywords_map, retag the token with that
    # keyword's type; otherwise it stays a plain "ID".
    t.type = keywords_map.get(t.value, "ID")
    return t


def t_multiline_comment(t):
def t_multiline_comment(t: lex.LexToken) -> None:
    r"""/\*(.|\n)*?\*/"""
    # Non-greedy match of a /* ... */ comment. The comment is discarded (no
    # return), but any newlines inside it still have to advance the lexer's
    # line counter to keep error positions correct.
    t.lexer.lineno += t.value.count("\n")


# skip single and multi line comments
def t_simple_comment(t):
def t_simple_comment(t: lex.LexToken) -> None:
    r"""//.*$"""
    # Discard a single-line // comment. The pattern cannot include a
    # newline, so the count below is always 0 here — presumably kept for
    # symmetry with t_multiline_comment; confirm before removing.
    t.lexer.lineno += t.value.count("\n")


def t_error(t):
def t_error(t: lex.LexToken) -> None:
    # PLY's error hook: log the character that matched no rule, then skip
    # one character so lexing continues instead of stalling on the same
    # position.
    logger.debug(f"Illegal character {repr(t.value[0])}")
    t.lexer.skip(1)

Expand Down
1 change: 1 addition & 0 deletions src/cyheadgen/_parser.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
# flake8: noqa
# type: ignore
import cyheadgen._lexer as lexer_module
import ply.yacc as yacc
from cyheadgen.ast import Function, Header, Macro, Argument, CEnum
Expand Down
41 changes: 21 additions & 20 deletions src/cyheadgen/ast.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,20 @@
from typing import List, Literal, Optional


def _resolve_type(value: str) -> str:
    """Resolve the C type of a literal from its textual value.

    Args:
        value: the literal as written in the source, e.g. ``'42'``,
            ``'3.14'`` or ``'"hello"'``.

    Returns:
        The matching ``Type`` member (``CHAR``, ``FLOAT``, ``INT``,
        ``LONG`` or ``LONG_LONG``).

    Raises:
        ValueError: if ``value`` is neither quoted, a float literal, nor a
            decimal integer (``int()`` rejects it).
    """
    if '"' in value:
        return Type.CHAR
    if "." in value:
        return Type.FLOAT

    magnitude = abs(int(value))
    # C guarantees only 16-bit `int` and 32-bit `long`, so the bounds are
    # INT16_MAX (32767) and INT32_MAX (2147483647). Both upper bounds are
    # inclusive of their own range: the original strict `< 2147483647`
    # comparison let exactly INT32_MAX fall through to INT, which a 16-bit
    # int cannot hold.
    if magnitude > 2147483647:
        return Type.LONG_LONG
    if magnitude > 32767:
        return Type.LONG
    return Type.INT


class Node:
"""Node class."""

Expand All @@ -29,12 +43,12 @@ class Header(Node):
name: str
type: Literal["custom", "standard"]

def __post_init__(self):
    def __post_init__(self) -> None:
        """Initialize the name."""
        # Custom headers carry surrounding quotes — presumably from the
        # `#include "name.h"` syntax (confirm against the parser) — so strip
        # them to keep `name` bare. "standard" headers are left untouched.
        if self.type == "custom":
            self.name = self.name.strip('"')

def expand(self):
    def expand(self) -> None:
        """Expand included header."""
        # Intentionally a stub: header expansion is not implemented yet.
        pass

Expand All @@ -44,34 +58,21 @@ class Macro(Node):
"""Macro Node."""

name: Optional[str]
type: Literal["ifndef", "def", "endif"]
type: Literal["ifndef", "define", "endif"]


@dataclass
class Argument(Node):
"""Abstraction for an argument."""

name: str
type: Type = "int"
type: str = "int"
value: Optional[str] = None

def __post_init__(self):
def __post_init__(self) -> None:
"""Initialize the type."""
if self.value:
self.type = self._resolve_type()

def _resolve_type(self) -> Type:
"""Resolve the type based on the value."""
if '"' in self.value:
return Type.CHAR
if "." in self.value:
return Type.FLOAT
if 32767 < abs(int(self.value)) < 2147483647:
return Type.LONG
if abs(int(self.value)) > 2147483647:
return Type.LONG_LONG

return Type.INT
if self.value is not None:
self.type = _resolve_type(value=self.value)


@dataclass(frozen=True)
Expand Down
2 changes: 1 addition & 1 deletion src/cyheadgen/lexer.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@
class CyHeadGenLexer:
"""The Lexer Wrapper Class."""

def __init__(self):
    def __init__(self) -> None:
        """The initialization method."""
        # Store the module-level PLY lexer; the wrapper delegates to it on
        # each call rather than building a new lexer per instance.
        self._lexer = ply_lexer

Expand Down
14 changes: 7 additions & 7 deletions src/cyheadgen/parser.py
Original file line number Diff line number Diff line change
@@ -1,20 +1,19 @@
"""A wrapper for the PLY parser."""

from typing import List
from typing import List, Union

from cyheadgen._parser import ply_parser # type: ignore[attr-defined]
from cyheadgen.ast import Node

from ._parser import ply_parser


class CyHeadGenParser:
"""The Parser Wrapper Class."""

def __init__(self):
    def __init__(self) -> None:
        """The initialization method."""
        # Store the module-level PLY parser; all instances share the same
        # underlying parser object.
        self._parser = ply_parser

def __call__(self, data: str, **kwargs) -> List[Node]:
def __call__(self, data: str, **kwargs: Union[str, bool]) -> List[Node]:
"""Parse the input string.
Args:
Expand All @@ -24,10 +23,11 @@ def __call__(self, data: str, **kwargs) -> List[Node]:
Returns:
the parsed input string.
"""
return self._parser.parse(data, **kwargs)
result: List[Node] = self._parser.parse(data, **kwargs) # mypy needs some help
return result

@classmethod
def parse_file(cls, file_path: str, **kwargs) -> List[Node]:
def parse_file(cls, file_path: str, **kwargs: Union[str, bool]) -> List[Node]:
"""Lex the content of a file.
Args:
Expand Down
14 changes: 8 additions & 6 deletions tests/test_lexer.py
Original file line number Diff line number Diff line change
@@ -1,11 +1,13 @@
"""Test lexer cases."""

from typing import List

import pytest
from cyheadgen import CyHeadGenLexer


@pytest.fixture
def lexer():
@pytest.fixture  # type: ignore[misc]
def lexer() -> CyHeadGenLexer:
    """Provide a fresh lexer wrapper for each test (default function scope)."""
    return CyHeadGenLexer()


Expand Down Expand Up @@ -69,13 +71,13 @@ def lexer():
),
("#endif // API_H", ["#", "endif"]), # test the comment discard
],
)
def test_lexer(lexer, inp, res):
) # type: ignore[misc]
def test_lexer(lexer: CyHeadGenLexer, inp: str, res: List[str]) -> None:
    """Lex `inp` and check the token values equal `res`, in order."""
    tokens = lexer(inp)
    assert [t.value for t in tokens] == res


def test_separated_statements(lexer):
def test_separated_statements(lexer: CyHeadGenLexer) -> None:
inp = """
#infdef API_H
#define API_H
Expand All @@ -100,7 +102,7 @@ def test_separated_statements(lexer):
]


def test_multiline_comment(lexer):
def test_multiline_comment(lexer: CyHeadGenLexer) -> None:
inp = """
#ifndef TOTO_H
#define TOTO_H
Expand Down
12 changes: 7 additions & 5 deletions tests/test_parser.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,12 @@
from typing import List

import pytest
from cyheadgen.ast import Argument, Function, Header, Macro
from cyheadgen.ast import Argument, Function, Header, Macro, Node
from cyheadgen.parser import CyHeadGenParser


@pytest.fixture
def parser():
@pytest.fixture  # type: ignore[misc]
def parser() -> CyHeadGenParser:
    """Provide a fresh parser wrapper for each test (default function scope)."""
    return CyHeadGenParser()


Expand Down Expand Up @@ -49,6 +51,6 @@ def parser():
),
("#endif // API_H", [Macro(name=None, type="endif")]),
],
)
def test_parse_func(parser, inp, res):
) # type: ignore[misc]
def test_parse_func(parser: CyHeadGenParser, inp: str, res: List[Node]) -> None:
    """Parse `inp` and compare the resulting AST node list against `res`."""
    assert parser(inp) == res

0 comments on commit 421b6ff

Please sign in to comment.