Skip to content

Commit dbbaaab

Browse files
authored
Merge pull request #128 from waketzheng/use-ruff
refactor: migrate lint tooling from isort+black to ruff and apply future-style type hints
2 parents 74810e9 + 3867af1 commit dbbaaab

File tree

9 files changed

+475
-488
lines changed

9 files changed

+475
-488
lines changed

.github/workflows/ci.yml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,7 @@ jobs:
1717
options: --health-cmd="mysqladmin ping" --health-interval 10s --health-timeout 5s --health-retries 5
1818
strategy:
1919
matrix:
20-
python-version: [ "3.8", "3.9", "3.10", "3.11", "3.12", "3.13" ]
20+
python-version: [ "3.9", "3.10", "3.11", "3.12", "3.13" ]
2121
steps:
2222
- uses: actions/cache@v4
2323
with:
@@ -48,7 +48,7 @@ jobs:
4848
options: --health-cmd="mariadb-admin ping -uroot -p${MYSQL_ROOT_PASSWORD}" --health-interval 10s --health-timeout 5s --health-retries 5
4949
strategy:
5050
matrix:
51-
python-version: [ "3.8", "3.9", "3.10", "3.11", "3.12", "3.13" ]
51+
python-version: [ "3.9", "3.10", "3.11", "3.12", "3.13" ]
5252
steps:
5353
- uses: actions/cache@v4
5454
with:

Makefile

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -6,16 +6,16 @@ up:
66
@poetry update
77

88
deps:
9-
@poetry install
9+
@poetry install --all-groups
1010

1111
_style:
12-
@isort -src $(checkfiles)
13-
@black $(checkfiles)
12+
@ruff format $(checkfiles)
13+
@ruff check --fix $(checkfiles)
1414

1515
style: deps _style
1616

1717
_check:
18-
@black --check $(checkfiles) || (echo "Please run 'make style' to auto-fix style issues" && false)
18+
@ruff format --check $(checkfiles) || (echo "Please run 'make style' to auto-fix style issues" && false)
1919
@ruff check $(checkfiles)
2020
@mypy $(checkfiles)
2121

asyncmy/contexts.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,7 @@
1+
from __future__ import annotations
12
from collections.abc import Coroutine
2-
from typing import Any, Iterator
3+
from typing import Any
4+
from collections.abc import Iterator
35

46

57
class _ContextManager(Coroutine):

asyncmy/replication/binlogstream.py

Lines changed: 19 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
1+
from __future__ import annotations
12
import struct
2-
from typing import Any, Dict, List, Optional, Set, Type, Union
3+
from typing import Any
34

45
from asyncmy import Connection
56
from asyncmy.constants.COMMAND import COM_BINLOG_DUMP, COM_BINLOG_DUMP_GTID, COM_REGISTER_SLAVE
@@ -37,7 +38,7 @@
3738

3839

3940
class ReportSlave:
40-
def __init__(self, value: Union[str, tuple, dict]):
41+
def __init__(self, value: str | tuple | dict):
4142
self._hostname = ""
4243
self._username = ""
4344
self._password = "" # nosec: B105
@@ -101,22 +102,22 @@ def __init__(
101102
connection: Connection,
102103
ctl_connection: Connection,
103104
server_id: int,
104-
slave_uuid: Optional[str] = None,
105-
slave_heartbeat: Optional[int] = None,
106-
report_slave: Optional[Union[str, tuple, dict]] = None,
107-
master_log_file: Optional[str] = None,
108-
master_log_position: Optional[int] = None,
109-
master_auto_position: Optional[Set[Gtid]] = None,
105+
slave_uuid: str | None = None,
106+
slave_heartbeat: int | None = None,
107+
report_slave: str | tuple | dict | None = None,
108+
master_log_file: str | None = None,
109+
master_log_position: int | None = None,
110+
master_auto_position: set[Gtid] | None = None,
110111
resume_stream: bool = False,
111112
blocking: bool = False,
112-
skip_to_timestamp: Optional[int] = None,
113-
only_events: Optional[List[Type[BinLogEvent]]] = None,
114-
ignored_events: Optional[List[Type[BinLogEvent]]] = None,
113+
skip_to_timestamp: int | None = None,
114+
only_events: list[type[BinLogEvent]] | None = None,
115+
ignored_events: list[type[BinLogEvent]] | None = None,
115116
filter_non_implemented_events: bool = True,
116-
only_tables: Optional[List[str]] = None,
117-
ignored_tables: Optional[List[str]] = None,
118-
only_schemas: Optional[List[str]] = None,
119-
ignored_schemas: Optional[List[str]] = None,
117+
only_tables: list[str] | None = None,
118+
ignored_tables: list[str] | None = None,
119+
only_schemas: list[str] | None = None,
120+
ignored_schemas: list[str] | None = None,
120121
freeze_schema: bool = False,
121122
):
122123
self._freeze_schema = freeze_schema
@@ -149,12 +150,12 @@ def __init__(
149150
RotateEvent,
150151
*self._allowed_events,
151152
]
152-
self._table_map: Dict[str, Any] = {}
153+
self._table_map: dict[str, Any] = {}
153154

154155
@staticmethod
155156
def _allowed_event_list(
156-
only_events: Optional[List[Type[BinLogEvent]]],
157-
ignored_events: Optional[List[Type[BinLogEvent]]],
157+
only_events: list[type[BinLogEvent]] | None,
158+
ignored_events: list[type[BinLogEvent]] | None,
158159
filter_non_implemented_events: bool,
159160
):
160161
if only_events is not None:

asyncmy/replication/events.py

Lines changed: 9 additions & 25 deletions
Original file line numberDiff line numberDiff line change
@@ -48,9 +48,7 @@ class GtidEvent(BinLogEvent):
4848
"""GTID change in binlog event"""
4949

5050
def __init__(self, from_packet, event_size, table_map, ctl_connection, **kwargs):
51-
super(GtidEvent, self).__init__(
52-
from_packet, event_size, table_map, ctl_connection, **kwargs
53-
)
51+
super().__init__(from_packet, event_size, table_map, ctl_connection, **kwargs)
5452

5553
self.commit_flag = self.packet.read(1) == 1
5654
self.sid = self.packet.read(16)
@@ -82,9 +80,7 @@ class RotateEvent(BinLogEvent):
8280
"""
8381

8482
def __init__(self, from_packet, event_size, table_map, ctl_connection, **kwargs):
85-
super(RotateEvent, self).__init__(
86-
from_packet, event_size, table_map, ctl_connection, **kwargs
87-
)
83+
super().__init__(from_packet, event_size, table_map, ctl_connection, **kwargs)
8884
self.position = struct.unpack("<Q", self.packet.read(8))[0]
8985
self.next_binlog = self.packet.read(event_size - 8).decode()
9086

@@ -105,15 +101,13 @@ class XidEvent(BinLogEvent):
105101
"""
106102

107103
def __init__(self, from_packet, event_size, table_map, ctl_connection, **kwargs):
108-
super(XidEvent, self).__init__(from_packet, event_size, table_map, ctl_connection, **kwargs)
104+
super().__init__(from_packet, event_size, table_map, ctl_connection, **kwargs)
109105
self.xid = struct.unpack("<Q", self.packet.read(8))[0]
110106

111107

112108
class HeartbeatLogEvent(BinLogEvent):
113109
def __init__(self, from_packet, event_size, table_map, ctl_connection, **kwargs):
114-
super(HeartbeatLogEvent, self).__init__(
115-
from_packet, event_size, table_map, ctl_connection, **kwargs
116-
)
110+
super().__init__(from_packet, event_size, table_map, ctl_connection, **kwargs)
117111
self.ident = self.packet.read(event_size).decode()
118112

119113

@@ -122,9 +116,7 @@ class QueryEvent(BinLogEvent):
122116
Only replicated queries are logged."""
123117

124118
def __init__(self, from_packet, event_size, table_map, ctl_connection, **kwargs):
125-
super(QueryEvent, self).__init__(
126-
from_packet, event_size, table_map, ctl_connection, **kwargs
127-
)
119+
super().__init__(from_packet, event_size, table_map, ctl_connection, **kwargs)
128120

129121
# Post-header
130122
self.slave_proxy_id = self.packet.read_uint32()
@@ -153,9 +145,7 @@ class BeginLoadQueryEvent(BinLogEvent):
153145
"""
154146

155147
def __init__(self, from_packet, event_size, table_map, ctl_connection, **kwargs):
156-
super(BeginLoadQueryEvent, self).__init__(
157-
from_packet, event_size, table_map, ctl_connection, **kwargs
158-
)
148+
super().__init__(from_packet, event_size, table_map, ctl_connection, **kwargs)
159149

160150
# Payload
161151
self.file_id = self.packet.read_uint32()
@@ -179,9 +169,7 @@ class ExecuteLoadQueryEvent(BinLogEvent):
179169
"""
180170

181171
def __init__(self, from_packet, event_size, table_map, ctl_connection, **kwargs):
182-
super(ExecuteLoadQueryEvent, self).__init__(
183-
from_packet, event_size, table_map, ctl_connection, **kwargs
184-
)
172+
super().__init__(from_packet, event_size, table_map, ctl_connection, **kwargs)
185173

186174
# Post-header
187175
self.slave_proxy_id = self.packet.read_uint32()
@@ -206,9 +194,7 @@ class IntvarEvent(BinLogEvent):
206194
"""
207195

208196
def __init__(self, from_packet, event_size, table_map, ctl_connection, **kwargs):
209-
super(IntvarEvent, self).__init__(
210-
from_packet, event_size, table_map, ctl_connection, **kwargs
211-
)
197+
super().__init__(from_packet, event_size, table_map, ctl_connection, **kwargs)
212198

213199
# Payload
214200
self.type = self.packet.read_uint8()
@@ -217,7 +203,5 @@ def __init__(self, from_packet, event_size, table_map, ctl_connection, **kwargs)
217203

218204
class NotImplementedEvent(BinLogEvent):
219205
def __init__(self, from_packet, event_size, table_map, ctl_connection, **kwargs):
220-
super(NotImplementedEvent, self).__init__(
221-
from_packet, event_size, table_map, ctl_connection, **kwargs
222-
)
206+
super().__init__(from_packet, event_size, table_map, ctl_connection, **kwargs)
223207
self.packet.advance(event_size)

asyncmy/replication/gtid.py

Lines changed: 7 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,8 @@
1+
from __future__ import annotations
12
import binascii
23
import re
34
import struct
45
from io import BytesIO
5-
from typing import Set, Union
66

77

88
class Gtid:
@@ -30,7 +30,7 @@ def parse_interval(interval):
3030
@staticmethod
3131
def parse(gtid: str):
3232
m = re.search(
33-
"^([0-9a-fA-F]{8}(?:-[0-9a-fA-F]{4}){3}-[0-9a-fA-F]{12})" "((?::[0-9-]+)+)$",
33+
"^([0-9a-fA-F]{8}(?:-[0-9a-fA-F]{4}){3}-[0-9a-fA-F]{12})((?::[0-9-]+)+)$",
3434
gtid,
3535
)
3636
if not m:
@@ -118,7 +118,7 @@ def __add__(self, other):
118118
"""Include the transactions of this gtid. Raise if the
119119
attempted merge has different SID"""
120120
if self.sid != other.sid:
121-
raise Exception("Attempt to merge different SID" "%s != %s" % (self.sid, other.sid))
121+
raise Exception("Attempt to merge different SID%s != %s" % (self.sid, other.sid))
122122

123123
result = Gtid(str(self))
124124

@@ -232,7 +232,7 @@ def __ge__(self, other):
232232

233233

234234
class GtidSet:
235-
def __init__(self, gtid_set: Set[Gtid]):
235+
def __init__(self, gtid_set: set[Gtid]):
236236
self._gtid_set = gtid_set
237237

238238
def merge_gtid(self, gtid: Gtid):
@@ -246,14 +246,14 @@ def merge_gtid(self, gtid: Gtid):
246246
new_gtid_set.add(gtid)
247247
self._gtid_set = new_gtid_set
248248

249-
def __contains__(self, other: Union[Gtid, "GtidSet"]):
249+
def __contains__(self, other: Gtid | GtidSet):
250250
if isinstance(other, GtidSet):
251251
return all(other_gtid in self._gtid_set for other_gtid in other._gtid_set)
252252
if isinstance(other, Gtid):
253253
return any(other in x for x in self._gtid_set)
254254
raise NotImplementedError
255255

256-
def __add__(self, other: Union[Gtid, "GtidSet"]):
256+
def __add__(self, other: Gtid | GtidSet):
257257
if isinstance(other, Gtid):
258258
new = GtidSet(self._gtid_set)
259259
new.merge_gtid(other)
@@ -289,5 +289,5 @@ def decode(cls, payload: BytesIO):
289289
(n_sid,) = struct.unpack("<Q", payload.read(8))
290290
return cls(set(Gtid.decode(payload) for _ in range(0, n_sid)))
291291

292-
def __eq__(self, other: "GtidSet"): # type: ignore[override]
292+
def __eq__(self, other: GtidSet): # type: ignore[override]
293293
return self._gtid_set == other._gtid_set

asyncmy/replication/row_events.py

Lines changed: 6 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -45,9 +45,7 @@
4545

4646
class RowsEvent(BinLogEvent):
4747
def __init__(self, from_packet, event_size, table_map, ctl_connection, **kwargs):
48-
super(RowsEvent, self).__init__(
49-
from_packet, event_size, table_map, ctl_connection, **kwargs
50-
)
48+
super().__init__(from_packet, event_size, table_map, ctl_connection, **kwargs)
5149
self._rows = None
5250
self._only_tables = kwargs["only_tables"]
5351
self._ignored_tables = kwargs["ignored_tables"]
@@ -462,9 +460,7 @@ class DeleteRowsEvent(RowsEvent):
462460
"""
463461

464462
def __init__(self, from_packet, event_size, table_map, ctl_connection, **kwargs):
465-
super(DeleteRowsEvent, self).__init__(
466-
from_packet, event_size, table_map, ctl_connection, **kwargs
467-
)
463+
super().__init__(from_packet, event_size, table_map, ctl_connection, **kwargs)
468464
if self._processed:
469465
self.columns_present_bitmap = self.packet.read((self.number_of_columns + 7) / 8)
470466

@@ -479,9 +475,7 @@ class WriteRowsEvent(RowsEvent):
479475
"""
480476

481477
def __init__(self, from_packet, event_size, table_map, ctl_connection, **kwargs):
482-
super(WriteRowsEvent, self).__init__(
483-
from_packet, event_size, table_map, ctl_connection, **kwargs
484-
)
478+
super().__init__(from_packet, event_size, table_map, ctl_connection, **kwargs)
485479
if self._processed:
486480
self.columns_present_bitmap = self.packet.read((self.number_of_columns + 7) / 8)
487481

@@ -501,9 +495,7 @@ class UpdateRowsEvent(RowsEvent):
501495
"""
502496

503497
def __init__(self, from_packet, event_size, table_map, ctl_connection, **kwargs):
504-
super(UpdateRowsEvent, self).__init__(
505-
from_packet, event_size, table_map, ctl_connection, **kwargs
506-
)
498+
super().__init__(from_packet, event_size, table_map, ctl_connection, **kwargs)
507499
if self._processed:
508500
# Body
509501
self.columns_present_bitmap = self.packet.read((self.number_of_columns + 7) / 8)
@@ -525,9 +517,7 @@ class TableMapEvent(BinLogEvent):
525517
"""
526518

527519
def __init__(self, from_packet, event_size, table_map, connection, **kwargs):
528-
super(TableMapEvent, self).__init__(
529-
from_packet, event_size, table_map, connection, **kwargs
530-
)
520+
super().__init__(from_packet, event_size, table_map, connection, **kwargs)
531521
self._only_tables = kwargs["only_tables"]
532522
self._ignored_tables = kwargs["ignored_tables"]
533523
self._only_schemas = kwargs["only_schemas"]
@@ -606,7 +596,7 @@ async def init(self):
606596
# to pymysqlreplication start, but replayed from binlog
607597
# from blowing up the service.
608598
column_schema = {
609-
"COLUMN_NAME": "__dropped_col_{i}__".format(i=i),
599+
"COLUMN_NAME": f"__dropped_col_{i}__",
610600
"COLLATION_NAME": None,
611601
"CHARACTER_SET_NAME": None,
612602
"COLUMN_COMMENT": None,

0 commit comments

Comments
 (0)