
Commit 486e942

add support for checkpoint path
1 parent 4631d59 commit 486e942

6 files changed (+27, -12 lines)

.env.sample (+2)

@@ -9,6 +9,8 @@
 # POLL_TIMEOUT=0.1
 # replication slot cleanup interval (in secs)
 # REPLICATION_SLOT_CLEANUP_INTERVAL=180
+# checkpoint file path
+# CHECKPOINT_PATH=./
 
 # Elasticsearch
 # ELASTICSEARCH_SCHEME=http
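
With this change the variable can be uncommented and pointed at any readable directory; the path below is only an illustration, not a default shipped with pgsync:

# .env -- store checkpoint files outside the working directory (illustrative path)
CHECKPOINT_PATH=/var/lib/pgsync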

pgsync/__init__.py (+1, -1)

@@ -4,4 +4,4 @@
 
 __author__ = "Tolu Aina"
 __email__ = "[email protected]"
-__version__ = "2.1.3"
+__version__ = "2.1.4"

pgsync/base.py (+11, -9)

@@ -928,7 +928,7 @@ def _get_foreign_keys(model_a, model_b):
     return foreign_keys
 
 
-def get_foreign_keys(node_a, node_b):
+def get_foreign_keys(node_a: Node, node_b: Node) -> dict:
     """Return dict of single foreign key with multiple columns.
 
     e.g:
@@ -939,7 +939,7 @@ def get_foreign_keys(node_a, node_b):
 
     column_1, column_2, column_N are of type ForeignKeyContraint
     """
-    foreign_keys = {}
+    foreign_keys: dict = {}
     # if either offers a foreign_key via relationship, use it!
     if (
         node_a.relationship.foreign_key.parent
@@ -1013,7 +1013,7 @@ def pg_engine(
     return sa.create_engine(url, echo=echo, connect_args=connect_args)
 
 
-def pg_execute(engine, query, values=None, options=None):
+def pg_execute(engine, query, values=None, options=None) -> None:
     options: dict = options or {"isolation_level": "AUTOCOMMIT"}
     conn = engine.connect()
     try:
@@ -1026,37 +1026,39 @@ def pg_execute(engine, query, values=None, options=None):
         raise
 
 
-def create_schema(engine, schema):
+def create_schema(engine, schema) -> None:
     """Create database schema."""
     if schema != SCHEMA:
         engine.execute(sa.schema.CreateSchema(schema))
 
 
-def create_database(database: str, echo: bool = False):
+def create_database(database: str, echo: bool = False) -> None:
     """Create a database."""
     logger.debug(f"Creating database: {database}")
     engine = pg_engine(database="postgres", echo=echo)
     pg_execute(engine, f'CREATE DATABASE "{database}"')
     logger.debug(f"Created database: {database}")
 
 
-def drop_database(database: str, echo: bool = False):
+def drop_database(database: str, echo: bool = False) -> None:
     """Drop a database."""
     logger.debug(f"Dropping database: {database}")
     engine = pg_engine(database="postgres", echo=echo)
     pg_execute(engine, f'DROP DATABASE IF EXISTS "{database}"')
     logger.debug(f"Dropped database: {database}")
 
 
-def create_extension(database: str, extension: str, echo: bool = False):
+def create_extension(
+    database: str, extension: str, echo: bool = False
+) -> None:
     """Create a database extension."""
     logger.debug(f"Creating extension: {extension}")
     engine = pg_engine(database=database, echo=echo)
     pg_execute(engine, f'CREATE EXTENSION IF NOT EXISTS "{extension}"')
     logger.debug(f"Created extension: {extension}")
 
 
-def drop_extension(database: str, extension: str, echo: bool = False):
+def drop_extension(database: str, extension: str, echo: bool = False) -> None:
     """Drop a database extension."""
     logger.debug(f"Dropping extension: {extension}")
     engine = pg_engine(database=database, echo=echo)
@@ -1066,7 +1068,7 @@ def drop_extension(database: str, extension: str, echo: bool = False):
 
 def compiled_query(
     query: str, label: Optional[str] = None, literal_binds: bool = False
-):
+) -> None:
     """Compile an SQLAlchemy query with an optional label."""
     query: str = str(
         query.compile(

pgsync/settings.py (+1)

@@ -22,6 +22,7 @@
     "REPLICATION_SLOT_CLEANUP_INTERVAL",
     default=180.0,
 )
+CHECKPOINT_PATH = env.str("CHECKPOINT_PATH", default="./")
 
 # Elasticsearch:
 ELASTICSEARCH_SCHEME = env.str("ELASTICSEARCH_SCHEME", default="http")
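
A minimal sketch of how this setting resolves with the environs library pgsync already uses; the standalone Env() setup below is illustrative, in pgsync the shared env object in settings.py is reused:

from environs import Env

env = Env()
env.read_env()  # picks up a local .env file if present

# falls back to the current working directory when the variable is unset
CHECKPOINT_PATH: str = env.str("CHECKPOINT_PATH", default="./")
print(CHECKPOINT_PATH)  # e.g. "/var/lib/pgsync" or "./"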

pgsync/sync.py (+11, -1)

@@ -42,6 +42,7 @@
 from .querybuilder import QueryBuilder
 from .redisqueue import RedisQueue
 from .settings import (
+    CHECKPOINT_PATH,
     POLL_TIMEOUT,
     REDIS_POLL_INTERVAL,
     REPLICATION_SLOT_CLEANUP_INTERVAL,
@@ -80,7 +81,9 @@ def __init__(
         self._checkpoint: int = None
         self._plugins: Plugins = None
         self._truncate: bool = False
-        self._checkpoint_file: str = f".{self.__name}"
+        self._checkpoint_file: str = os.path.join(
+            CHECKPOINT_PATH, f".{self.__name}"
+        )
         self.redis: RedisQueue = RedisQueue(self.__name)
         self.tree: Tree = Tree(self)
         self._last_truncate_timestamp: datetime = datetime.now()
@@ -159,6 +162,13 @@ def validate(self, repl_slots: Optional[bool] = True) -> None:
                 f'Make sure you have run the "bootstrap" command.'
             )
 
+        # ensure the checkpoint filepath is valid
+        if not os.path.exists(CHECKPOINT_PATH):
+            raise RuntimeError(
+                f'Ensure the checkpoint path exists "{CHECKPOINT_PATH}" '
+                f"and is readable ."
+            )
+
         root: Node = self.tree.build(self.nodes)
         root.display()
         for node in traverse_breadth_first(root):
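
Taken together, the checkpoint file for a sync now lives under CHECKPOINT_PATH rather than the working directory, and validate() fails fast when that directory is missing. A minimal standalone sketch of the resulting behaviour; the function and variable names below are illustrative, not pgsync's API:

import os

CHECKPOINT_PATH = os.environ.get("CHECKPOINT_PATH", "./")


def checkpoint_file(name: str) -> str:
    # hidden file named after the sync, e.g. ".book", placed under CHECKPOINT_PATH
    return os.path.join(CHECKPOINT_PATH, f".{name}")


def validate_checkpoint_path() -> None:
    # mirrors the new check in Sync.validate(): fail early if the directory is absent
    if not os.path.exists(CHECKPOINT_PATH):
        raise RuntimeError(
            f'Ensure the checkpoint path exists "{CHECKPOINT_PATH}" and is readable.'
        )


validate_checkpoint_path()
print(checkpoint_file("book"))  # e.g. "/var/lib/pgsync/.book" or "./.book"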

setup.cfg (+1, -1)

@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 2.1.3
+current_version = 2.1.4
 commit = True
 tag = True
 
