Merge pull request #313 from fellowapp/py310-ruff
Drop support for Python < 3.10 and add additional Ruff rules
sciyoshi authored Apr 22, 2024
2 parents c708b2e + 5dbecf4 commit 8758d89
Showing 40 changed files with 1,066 additions and 880 deletions.
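
Most of the churn below is mechanical: with Python 3.10 as the new minimum, Optional[X], Union[...], List, and Dict are rewritten as PEP 604 unions (X | None) and PEP 585 builtin generics (list, dict), and the newly enabled Ruff rules add trailing commas and pull exception messages into a local variable before raising. A minimal before/after sketch of the pattern (illustrative only; the find helpers and their arguments are invented, not repository code):

    # Before: Python 3.8-compatible hints
    from typing import Dict, List, Optional, Union

    def find_old(index: Dict[str, List[int]], key: str) -> Optional[Union[int, str]]:
        values = index.get(key)
        if not values:
            raise KeyError(f"no entries for {key!r}")
        return values[0]

    # After: Python 3.10+ builtin generics and | unions, plus the flake8-errmsg (EM) style fix
    def find_new(index: dict[str, list[int]], key: str) -> int | str | None:
        values = index.get(key)
        if not values:
            msg = f"no entries for {key!r}"
            raise KeyError(msg)
        return values[0]
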
2 changes: 1 addition & 1 deletion .github/workflows/test.yml
@@ -11,7 +11,7 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"]
python-version: ["3.10", "3.11", "3.12"]
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
4 changes: 2 additions & 2 deletions example.py
@@ -75,10 +75,10 @@
"parseDOM": [{"tag": "a[href]"}],
},
"em": {
"parseDOM": [{"tag": "i"}, {"tag": "em"}, {"style": "font-style=italic"}]
"parseDOM": [{"tag": "i"}, {"tag": "em"}, {"style": "font-style=italic"}],
},
"strong": {
"parseDOM": [{"tag": "strong"}, {"tag": "b"}, {"style": "font-weight"}]
"parseDOM": [{"tag": "strong"}, {"tag": "b"}, {"style": "font-weight"}],
},
"code": {"parseDOM": [{"tag": "code"}]},
},
106 changes: 54 additions & 52 deletions prosemirror/model/content.py
@@ -3,14 +3,11 @@
from typing import (
TYPE_CHECKING,
ClassVar,
Dict,
List,
Literal,
NamedTuple,
NoReturn,
Optional,
TypedDict,
Union,
cast,
)

@@ -32,11 +29,9 @@ def __init__(self, type: "NodeType", next: "ContentMatch") -> None:

class WrapCacheEntry:
target: "NodeType"
computed: Optional[List["NodeType"]]
computed: list["NodeType"] | None

def __init__(
self, target: "NodeType", computed: Optional[List["NodeType"]]
) -> None:
def __init__(self, target: "NodeType", computed: list["NodeType"] | None) -> None:
self.target = target
self.computed = computed

@@ -57,16 +52,16 @@ class ContentMatch:

empty: ClassVar["ContentMatch"]
valid_end: bool
next: List[MatchEdge]
wrap_cache: List[WrapCacheEntry]
next: list[MatchEdge]
wrap_cache: list[WrapCacheEntry]

def __init__(self, valid_end: bool) -> None:
self.valid_end = valid_end
self.next = []
self.wrap_cache = []

@classmethod
def parse(cls, string: str, node_types: Dict[str, "NodeType"]) -> "ContentMatch":
def parse(cls, string: str, node_types: dict[str, "NodeType"]) -> "ContentMatch":
stream = TokenStream(string, node_types)
if stream.next() is None:
return ContentMatch.empty
@@ -84,11 +79,14 @@ def match_type(self, type: "NodeType") -> Optional["ContentMatch"]:
return None

def match_fragment(
self, frag: Fragment, start: int = 0, end: Optional[int] = None
self,
frag: Fragment,
start: int = 0,
end: int | None = None,
) -> Optional["ContentMatch"]:
if end is None:
end = frag.child_count
cur: Optional["ContentMatch"] = self
cur: "ContentMatch" | None = self
i = start
while cur and i < end:
cur = cur.match_type(frag.child(i).type)
@@ -115,11 +113,14 @@ def compatible(self, other: "ContentMatch") -> bool:
return False

def fill_before(
self, after: Fragment, to_end: bool = False, start_index: int = 0
) -> Optional[Fragment]:
self,
after: Fragment,
to_end: bool = False,
start_index: int = 0,
) -> Fragment | None:
seen = [self]

def search(match: ContentMatch, types: List["NodeType"]) -> Optional[Fragment]:
def search(match: ContentMatch, types: list["NodeType"]) -> Fragment | None:
nonlocal seen
finished = match.match_fragment(after, start_index)
if finished and (not to_end or finished.valid_end):
@@ -138,17 +139,17 @@ def search(match: ContentMatch, types: List["NodeType"]) -> Optional[Fragment]:

return search(self, [])

def find_wrapping(self, target: "NodeType") -> Optional[List["NodeType"]]:
def find_wrapping(self, target: "NodeType") -> list["NodeType"] | None:
for entry in self.wrap_cache:
if entry.target.name == target.name:
return entry.computed
computed = self.compute_wrapping(target)
self.wrap_cache.append(WrapCacheEntry(target, computed))
return computed

def compute_wrapping(self, target: "NodeType") -> Optional[List["NodeType"]]:
def compute_wrapping(self, target: "NodeType") -> list["NodeType"] | None:
seen = {}
active: List[Active] = [{"match": self, "type": None, "via": None}]
active: list[Active] = [{"match": self, "type": None, "via": None}]
while len(active):
current = active.pop(0)
match = current["match"]
@@ -181,7 +182,8 @@ def edge_count(self) -> int:

def edge(self, n: int) -> MatchEdge:
if n >= len(self.next):
raise ValueError(f"There's no {n}th edge in this content match")
msg = f"There's no {n}th edge in this content match"
raise ValueError(msg)
return self.next[n]

def __str__(self) -> str:
@@ -217,23 +219,23 @@ def iteratee(m: "ContentMatch", i: int) -> str:


class TokenStream:
inline: Optional[bool]
tokens: List[str]
inline: bool | None
tokens: list[str]

def __init__(self, string: str, node_types: Dict[str, "NodeType"]) -> None:
def __init__(self, string: str, node_types: dict[str, "NodeType"]) -> None:
self.string = string
self.node_types = node_types
self.inline = None
self.pos = 0
self.tokens = [i for i in TOKEN_REGEX.findall(string) if i.strip()]

def next(self) -> Optional[str]:
def next(self) -> str | None:
try:
return self.tokens[self.pos]
except IndexError:
return None

def eat(self, tok: str) -> Union[int, bool]:
def eat(self, tok: str) -> int | bool:
if self.next() == tok:
pos = self.pos
self.pos += 1
@@ -242,17 +244,18 @@ def eat(self, tok: str) -> Union[int, bool]:
return False

def err(self, str: str) -> NoReturn:
raise SyntaxError(f'{str} (in content expression) "{self.string}"')
msg = f'{str} (in content expression) "{self.string}"'
raise SyntaxError(msg)


class ChoiceExpr(TypedDict):
type: Literal["choice"]
exprs: List["Expr"]
exprs: list["Expr"]


class SeqExpr(TypedDict):
type: Literal["seq"]
exprs: List["Expr"]
exprs: list["Expr"]


class PlusExpr(TypedDict):
Expand Down Expand Up @@ -282,7 +285,7 @@ class NameExpr(TypedDict):
value: "NodeType"


Expr = Union[ChoiceExpr, SeqExpr, PlusExpr, StarExpr, OptExpr, RangeExpr, NameExpr]
Expr = ChoiceExpr | SeqExpr | PlusExpr | StarExpr | OptExpr | RangeExpr | NameExpr


def parse_expr(stream: TokenStream) -> Expr:
Expand Down Expand Up @@ -341,16 +344,13 @@ def parse_expr_range(stream: TokenStream, expr: Expr) -> Expr:
min_ = parse_num(stream)
max_ = min_
if stream.eat(","):
if stream.next() != "}":
max_ = parse_num(stream)
else:
max_ = -1
max_ = parse_num(stream) if stream.next() != "}" else -1
if not stream.eat("}"):
stream.err("Unclosed braced range")
return {"type": "range", "min": min_, "max": max_, "expr": expr}


def resolve_name(stream: TokenStream, name: str) -> List["NodeType"]:
def resolve_name(stream: TokenStream, name: str) -> list["NodeType"]:
types = stream.node_types
type = types.get(name)
if type:
@@ -395,39 +395,41 @@ def iteratee(type: "NodeType") -> Expr:

class Edge(TypedDict):
term: Optional["NodeType"]
to: Optional[int]
to: int | None


def nfa(
expr: Expr,
) -> List[List[Edge]]:
nfa_: List[List[Edge]] = [[]]
) -> list[list[Edge]]:
nfa_: list[list[Edge]] = [[]]

def node() -> int:
nonlocal nfa_
nfa_.append([])
return len(nfa_) - 1

def edge(
from_: int, to: Optional[int] = None, term: Optional["NodeType"] = None
from_: int,
to: int | None = None,
term: Optional["NodeType"] = None,
) -> Edge:
nonlocal nfa_
edge: Edge = {"term": term, "to": to}
nfa_[from_].append(edge)
return edge

def connect(edges: List[Edge], to: int) -> None:
def connect(edges: list[Edge], to: int) -> None:
for edge in edges:
edge["to"] = to

def compile(expr: Expr, from_: int) -> List[Edge]:
def compile(expr: Expr, from_: int) -> list[Edge]:
if expr["type"] == "choice":
return list(
reduce(
lambda out, expr: [*out, *compile(expr, from_)],
expr["exprs"],
cast(List[Edge], []),
)
cast(list[Edge], []),
),
)
elif expr["type"] == "seq":
i = 0
@@ -452,14 +454,14 @@ def compile(expr: Expr, from_: int) -> List[Edge]:
return [edge(from_), *compile(expr["expr"], from_)]
elif expr["type"] == "range":
cur = from_
for i in range(expr["min"]):
for _i in range(expr["min"]):
next = node()
connect(compile(expr["expr"], cur), next)
cur = next
if expr["max"] == -1:
connect(compile(expr["expr"], cur), cur)
else:
for i in range(expr["min"], expr["max"]):
for _i in range(expr["min"], expr["max"]):
next = node()
edge(cur, next)
connect(compile(expr["expr"], cur), next)
@@ -477,9 +479,9 @@ def cmp(a: int, b: int) -> int:


def null_from(
nfa: List[List[Edge]],
nfa: list[list[Edge]],
node: int,
) -> List[int]:
) -> list[int]:
result = []

def scan(n: int) -> None:
@@ -499,21 +501,21 @@ def scan(n: int) -> None:

class DFAState(NamedTuple):
state: "NodeType"
next: List[int]
next: list[int]


def dfa(nfa: List[List[Edge]]) -> ContentMatch:
def dfa(nfa: list[list[Edge]]) -> ContentMatch:
labeled = {}

def explore(states: List[int]) -> ContentMatch:
def explore(states: list[int]) -> ContentMatch:
nonlocal labeled
out: List[DFAState] = []
out: list[DFAState] = []
for node in states:
for item in nfa[node]:
term, to = item.get("term"), item.get("to")
if not term:
continue
set: Optional[List[int]] = None
set: list[int] | None = None
for t in out:
if t[0] == term:
set = t[1]
@@ -530,7 +532,7 @@ def explore(states: List[int]) -> ContentMatch:
states = out[i][1]
find_by_key = ",".join(str(s) for s in states)
state.next.append(
MatchEdge(out[i][0], labeled.get(find_by_key) or explore(states))
MatchEdge(out[i][0], labeled.get(find_by_key) or explore(states)),
)
return state

@@ -555,6 +557,6 @@ def check_for_dead_ends(match: ContentMatch, stream: TokenStream) -> None:
if dead:
stream.err(
f'Only non-generatable nodes ({", ".join(nodes)}) in a required '
"position (see https://prosemirror.net/docs/guide/#generatable)"
"position (see https://prosemirror.net/docs/guide/#generatable)",
)
i += 1
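
One detail worth noting in content.py: a few Optional[...] hints survive, for example term: Optional["NodeType"] in the Edge TypedDict and in the edge() helper, while to: Optional[int] becomes int | None. A plausible reason is that a quoted forward reference cannot be or-ed with None at runtime: class-level and signature annotations are evaluated eagerly unless the module uses from __future__ import annotations, and str | None raises TypeError. A small illustration (the classes below are dummies, not the real Edge or NodeType):

    from typing import Optional

    class Edge:  # simplified stand-in, not the TypedDict from content.py
        term: Optional["NodeType"]  # OK: the string becomes a typing.ForwardRef
        to: int | None              # OK: both operands are real types

    try:
        class BadEdge:
            term: "NodeType" | None  # evaluated eagerly -> TypeError at class creation
    except TypeError as exc:
        print(exc)  # unsupported operand type(s) for |: 'str' and 'NoneType'

    class NodeType:
        pass
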
17 changes: 10 additions & 7 deletions prosemirror/model/diff.py
@@ -1,4 +1,4 @@
from typing import TYPE_CHECKING, Optional, TypedDict
from typing import TYPE_CHECKING, TypedDict

from prosemirror.utils import text_length

@@ -13,7 +13,7 @@ class Diff(TypedDict):
b: int


def find_diff_start(a: "Fragment", b: "Fragment", pos: int) -> Optional[int]:
def find_diff_start(a: "Fragment", b: "Fragment", pos: int) -> int | None:
i = 0
while True:
if a.child_count == i or b.child_count == i:
@@ -36,7 +36,9 @@ def find_diff_start(a: "Fragment", b: "Fragment", pos: int) -> Optional[int]:
(
index_a
for ((index_a, char_a), (_, char_b)) in zip(
enumerate(child_a.text), enumerate(child_b.text)
enumerate(child_a.text),
enumerate(child_b.text),
strict=True,
)
if char_a != char_b
),
@@ -52,9 +54,7 @@ def find_diff_start(a: "Fragment", b: "Fragment", pos: int) -> Optional[int]:
i += 1


def find_diff_end(
a: "Fragment", b: "Fragment", pos_a: int, pos_b: int
) -> Optional[Diff]:
def find_diff_end(a: "Fragment", b: "Fragment", pos_a: int, pos_b: int) -> Diff | None:
i_a, i_b = a.child_count, b.child_count
while True:
if i_a == 0 or i_b == 0:
@@ -94,7 +94,10 @@ def find_diff_end(

if child_a.content.size or child_b.content.size:
inner = find_diff_end(
child_a.content, child_b.content, pos_a - 1, pos_b - 1
child_a.content,
child_b.content,
pos_a - 1,
pos_b - 1,
)
if inner:
return inner
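
The strict=True added to zip() in find_diff_start likely comes from Ruff's zip-without-strict rule (flake8-bugbear's B905), which dropping Python 3.9 makes possible: instead of silently truncating to the shorter iterable, a length mismatch now raises ValueError. A generic illustration of the behavior (not repository code):

    print(list(zip("abc", "xyz", strict=True)))  # equal lengths: [('a', 'x'), ('b', 'y'), ('c', 'z')]

    try:
        list(zip("abc", "wxyz", strict=True))    # lengths differ
    except ValueError as exc:
        print(exc)                               # e.g. "zip() argument 2 is longer than argument 1"
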