Commit 5aa536d

spelling: hard link(s)
1 parent 2c7bec0 commit 5aa536d

15 files changed: +61, -60 lines

src/borg/archive.py

Lines changed: 12 additions & 12 deletions
@@ -240,7 +240,7 @@ def stat_update_check(st_old, st_curr):
         # in this case, we dispatched to wrong handler - abort
         raise BackupRaceConditionError("file type changed (race condition), skipping file")
     if st_old.st_ino != st_curr.st_ino:
-        # in this case, the hardlinks-related code in create_helper has the wrong inode - abort!
+        # in this case, the hard-links-related code in create_helper has the wrong inode - abort!
         raise BackupRaceConditionError("file inode changed (race condition), skipping file")
     # looks ok, we are still dealing with the same thing - return current stat:
     return st_curr

@@ -290,7 +290,7 @@ def preload_item_chunks(self, item, optimize_hardlinks=False):
         """
         Preloads the content data chunks of an item (if any).
         optimize_hardlinks can be set to True if item chunks only need to be preloaded for
-        1st hardlink, but not for any further hardlink to same inode / with same hlid.
+        1st hard link, but not for any further hard link to same inode / with same hlid.
         Returns True if chunks were preloaded.

         Warning: if data chunks are preloaded then all data chunks have to be retrieved,

@@ -305,7 +305,7 @@ def preload_item_chunks(self, item, optimize_hardlinks=False):
             elif hlid in self.hlids_preloaded:
                 preload_chunks = False
             else:
-                # not having the hardlink's chunks already preloaded for other hardlink to same inode
+                # not having the hard link's chunks already preloaded for other hard link to same inode
                 preload_chunks = True
             self.hlids_preloaded.add(hlid)
         else:

@@ -730,15 +730,15 @@ def extract_helper(self, item, path, hlm, *, dry_run=False):
             link_target = hlm.retrieve(id=item.hlid)
             if link_target is not None and has_link:
                 if not dry_run:
-                    # another hardlink to same inode (same hlid) was extracted previously, just link to it
+                    # another hard link to same inode (same hlid) was extracted previously, just link to it
                     with backup_io("link"):
                         os.link(link_target, path, follow_symlinks=False)
                 hardlink_set = True
         yield hardlink_set
         if not hardlink_set:
             if "hlid" in item and has_link:
-                # Update entry with extracted item path, so that following hardlinks don't extract twice.
-                # We have hardlinking support, so we will hardlink not extract.
+                # Update entry with extracted item path, so that following hard links don't extract twice.
+                # We have hardlinking support, so we will hard link not extract.
                 hlm.remember(id=item.hlid, info=path)
             else:
                 # Broken platform with no hardlinking support.

@@ -765,7 +765,7 @@ def extract_item(
         :param dry_run: do not write any data
         :param stdout: write extracted data to stdout
         :param sparse: write sparse files (chunk-granularity, independent of the original being sparse)
-        :param hlm: maps hlid to link_target for extracting subtrees with hardlinks correctly
+        :param hlm: maps hlid to link_target for extracting subtrees with hard links correctly
         :param pi: ProgressIndicatorPercent (or similar) for file extraction progress (in bytes)
         :param continue_extraction: continue a previously interrupted extraction of the same archive
         """

@@ -791,7 +791,7 @@ def same_item(item, st):
         if dry_run or stdout:
             with self.extract_helper(item, "", hlm, dry_run=dry_run or stdout) as hardlink_set:
                 if not hardlink_set:
-                    # it does not really set hardlinks due to dry_run, but we need to behave same
+                    # it does not really set hard links due to dry_run, but we need to behave same
                     # as non-dry_run concerning fetching preloaded chunks from the pipeline or
                     # it would get stuck.
                     if "chunks" in item:

@@ -1248,7 +1248,7 @@ def create_helper(self, path, st, status=None, hardlinkable=True, strip_prefix=N
         hl_chunks = None
         update_map = False
         if hardlinked:
-            status = "h"  # hardlink
+            status = "h"  # hard link
             nothing = object()
             chunks = self.hlm.retrieve(id=(st.st_ino, st.st_dev), default=nothing)
             if chunks is nothing:

@@ -1261,7 +1261,7 @@ def create_helper(self, path, st, status=None, hardlinkable=True, strip_prefix=N
         self.add_item(item, stats=self.stats)
         if update_map:
             # remember the hlid of this fs object and if the item has chunks,
-            # also remember them, so we do not have to re-chunk a hardlink.
+            # also remember them, so we do not have to re-chunk a hard link.
             chunks = item.chunks if "chunks" in item else None
             self.hlm.remember(id=(st.st_ino, st.st_dev), info=chunks)
 

@@ -1394,13 +1394,13 @@ def process_file(self, *, path, parent_fd, name, st, cache, flags=flags_normal,
             # this needs to be done early, so that part files also get the patched mode.
             item.mode = stat.S_IFREG | stat.S_IMODE(item.mode)
             # we begin processing chunks now.
-            if hl_chunks is not None:  # create_helper gave us chunks from a previous hardlink
+            if hl_chunks is not None:  # create_helper gave us chunks from a previous hard link
                 item.chunks = []
                 for chunk_id, chunk_size in hl_chunks:
                     # process one-by-one, so we will know in item.chunks how far we got
                     chunk_entry = cache.reuse_chunk(chunk_id, chunk_size, self.stats)
                     item.chunks.append(chunk_entry)
-            else:  # normal case, no "2nd+" hardlink
+            else:  # normal case, no "2nd+" hard link
                 if not is_special_file:
                     hashed_path = safe_encode(item.path)  # path as in archive item!
                     started_hashing = time.monotonic()
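
Note: the extract_helper hunk above boils down to a small "hlid -> first extracted path" map plus an os.link() call for every later item with the same hlid. A minimal sketch of that idea, with hypothetical names (HardLinkMapSketch, write_file_contents) that are not borg's API:

import os

class HardLinkMapSketch:
    """Hypothetical stand-in for the extraction side of borg's HardLinkManager."""

    def __init__(self):
        self._by_hlid = {}  # hlid -> path of the first extracted hard link

    def remember(self, *, id, info):
        self._by_hlid[id] = info

    def retrieve(self, *, id):
        return self._by_hlid.get(id)

def write_file_contents(item, path):
    """hypothetical helper: write the item's data to path"""
    with open(path, "wb") as f:
        f.write(item.get("data", b""))

def extract_sketch(item, path, hlm):
    """Extract one item, hard linking to an earlier extraction when possible."""
    link_target = hlm.retrieve(id=item["hlid"]) if "hlid" in item else None
    if link_target is not None:
        # another hard link to the same inode (same hlid) was extracted
        # previously - just link to it instead of writing the data again
        os.link(link_target, path, follow_symlinks=False)
        return
    write_file_contents(item, path)
    if "hlid" in item:
        # remember the path, so that following hard links don't extract twice
        hlm.remember(id=item["hlid"], info=path)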

src/borg/archiver/create_cmd.py

Lines changed: 1 addition & 1 deletion
@@ -696,7 +696,7 @@ def build_parser_create(self, subparsers, common_parser, mid_common_parser):
         - 'd' = directory
         - 'b' = block device
         - 'c' = char device
-        - 'h' = regular file, hardlink (to already seen inodes)
+        - 'h' = regular file, hard link (to already seen inodes)
         - 's' = symlink
         - 'f' = fifo
 

src/borg/archiver/tar_cmds.py

Lines changed: 4 additions & 4 deletions
@@ -140,8 +140,8 @@ def item_to_tarinfo(item, original_path):
     tarinfo.uname = item.get("user", "")
     tarinfo.gname = item.get("group", "")
     # The linkname in tar has 2 uses:
-    # for symlinks it means the destination, while for hardlinks it refers to the file.
-    # Since hardlinks in tar have a different type code (LNKTYPE) the format might
+    # for symlinks it means the destination, while for hard links it refers to the file.
+    # Since hard links in tar have a different type code (LNKTYPE) the format might
     # support hardlinking arbitrary objects (including symlinks and directories), but
     # whether implementations actually support that is a whole different question...
     tarinfo.linkname = ""

@@ -152,7 +152,7 @@ def item_to_tarinfo(item, original_path):
     if "hlid" in item:
         linkname = hlm.retrieve(id=item.hlid)
         if linkname is not None:
-            # the first hardlink was already added to the archive, add a tar-hardlink reference to it.
+            # the first hard link was already added to the archive, add a tar-hard-link reference to it.
             tarinfo.type = tarfile.LNKTYPE
             tarinfo.linkname = linkname
         else:

@@ -309,7 +309,7 @@ def _import_tar(self, args, repository, manifest, key, cache, tarstream):
         elif tarinfo.issym():
             status = tfo.process_symlink(tarinfo=tarinfo, status="s", type=stat.S_IFLNK)
         elif tarinfo.islnk():
-            # tar uses a hardlink model like: the first instance of a hardlink is stored as a regular file,
+            # tar uses a hard link model like: the first instance of a hard link is stored as a regular file,
             # later instances are special entries referencing back to the first instance.
             status = tfo.process_hardlink(tarinfo=tarinfo, status="h", type=stat.S_IFREG)
         elif tarinfo.isblk():
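
Note: the tar hard-link model described in the _import_tar hunk is easy to replicate with Python's tarfile module. A one-pass sketch under simplifying assumptions (import_tar_sketch and the stored dict are illustrative; borg reuses chunk lists, not raw bytes):

import tarfile

def import_tar_sketch(tar_path):
    """Walk a tar archive, reusing content for LNKTYPE members."""
    stored = {}  # archived name -> content bytes
    with tarfile.open(tar_path) as tf:
        for tarinfo in tf:
            if tarinfo.isreg():
                # the first instance of a hard link group is stored as a regular file
                stored[tarinfo.name] = tf.extractfile(tarinfo).read()
            elif tarinfo.islnk():
                # later instances are special entries referencing back to the first one
                data = stored[tarinfo.linkname]
                # ... a real importer would now create a regular file item reusing that content
    return stored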

src/borg/fuse.py

Lines changed: 1 addition & 1 deletion
@@ -422,7 +422,7 @@ def make_versioned_name(name, version, add_dir=False):
             else:
                 inode = item_inode
                 self._items[inode] = item
-                # remember extracted item path, so that following hardlinks don't extract twice.
+                # remember extracted item path, so that following hard links don't extract twice.
                 hlm.remember(id=item.hlid, info=path)
         else:
             inode = item_inode

src/borg/helpers/fs.py

Lines changed: 13 additions & 12 deletions
@@ -320,25 +320,26 @@ def to_sanitized_path(path):
 
 class HardLinkManager:
     """
-    Manage hardlinks (and avoid code duplication doing so).
+    Manage hard links (and avoid code duplication doing so).
 
     A) When creating a borg2 archive from the filesystem, we have to maintain a mapping like:
        (dev, ino) -> (hlid, chunks)  # for fs_hl_targets
        If we encounter the same (dev, ino) again later, we'll just re-use the hlid and chunks list.
 
     B) When extracting a borg2 archive to the filesystem, we have to maintain a mapping like:
        hlid -> path
-       If we encounter the same hlid again later, we hardlink to the path of the already extracted content of same hlid.
+       If we encounter the same hlid again later, we hard link to the path of the already extracted
+       content of same hlid.
 
     C) When transferring from a borg1 archive, we need:
        path -> chunks_correct  # for borg1_hl_targets, chunks_correct must be either from .chunks_healthy or .chunks.
        If we encounter a regular file item with source == path later, we reuse chunks_correct
       and create the same hlid = hardlink_id_from_path(source).
 
-    D) When importing a tar file (simplified 1-pass way for now, not creating borg hardlink items):
+    D) When importing a tar file (simplified 1-pass way for now, not creating borg hard link items):
        path -> chunks
        If we encounter a LNK tar entry later with linkname==path, we re-use the chunks and create a regular file item.
-       For better hardlink support (including the very first hardlink item for each group of same-target hardlinks),
+       For better hard link support (including the very first hard link item for each group of same-target hard links),
        we would need a 2-pass processing, which is not yet implemented.
     """
 

@@ -357,12 +358,12 @@ def borg1_hardlink_slave(self, item):  # legacy
         return "source" in item and self.borg1_hardlinkable(item.mode)
 
     def hardlink_id_from_path(self, path):
-        """compute a hardlink id from a path"""
+        """compute a hard link id from a path"""
         assert isinstance(path, str)
         return hashlib.sha256(path.encode("utf-8", errors="surrogateescape")).digest()
 
     def hardlink_id_from_inode(self, *, ino, dev):
-        """compute a hardlink id from an inode"""
+        """compute a hard link id from an inode"""
         assert isinstance(ino, int)
         assert isinstance(dev, int)
         return hashlib.sha256(f"{ino}/{dev}".encode()).digest()

@@ -414,11 +415,11 @@ def secure_erase(path, *, avoid_collateral_damage):
 
     If avoid_collateral_damage is True, we only secure erase if the total link count is 1,
     otherwise we just do a normal "delete" (unlink) without first overwriting it with random.
-    This avoids other hardlinks pointing to same inode as <path> getting damaged, but might be less secure.
-    A typical scenario where this is useful are quick "hardlink copies" of bigger directories.
+    This avoids other hard links pointing to same inode as <path> getting damaged, but might be less secure.
+    A typical scenario where this is useful are quick "hard link copies" of bigger directories.
 
     If avoid_collateral_damage is False, we always secure erase.
-    If there are hardlinks pointing to the same inode as <path>, they will contain random garbage afterwards.
+    If there are hard links pointing to the same inode as <path>, they will contain random garbage afterwards.
     """
     path_obj = Path(path)
     with path_obj.open("r+b") as fd:

@@ -435,7 +436,7 @@ def safe_unlink(path):
     Safely unlink (delete) *path*.
 
     If we run out of space while deleting the file, we try truncating it first.
-    BUT we truncate only if path is the only hardlink referring to this content.
+    BUT we truncate only if path is the only hard link referring to this content.
 
     Use this when deleting potentially large files when recovering
     from a VFS error such as ENOSPC. It can help a full file system

@@ -452,9 +453,9 @@ def safe_unlink(path):
         # we ran out of space while trying to delete the file.
         st = path_obj.stat()
         if st.st_nlink > 1:
-            # rather give up here than cause collateral damage to the other hardlink.
+            # rather give up here than cause collateral damage to the other hard link.
             raise
-        # no other hardlink! try to recover free space by truncating this file.
+        # no other hard link! try to recover free space by truncating this file.
         try:
             # Do not create *path* if it does not exist, open for truncation in r+b mode (=O_RDWR|O_BINARY).
             with open(path, "r+b") as fd:
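
Note: the two id functions changed above are self-contained; as a standalone sketch grounded in this hunk (module-level functions here, methods on HardLinkManager in borg):

import hashlib

def hardlink_id_from_path(path: str) -> bytes:
    """compute a hard link id from a path"""
    return hashlib.sha256(path.encode("utf-8", errors="surrogateescape")).digest()

def hardlink_id_from_inode(*, ino: int, dev: int) -> bytes:
    """compute a hard link id from an inode"""
    return hashlib.sha256(f"{ino}/{dev}".encode()).digest()

# all members of one hard link group map to the same hlid:
assert hardlink_id_from_inode(ino=123, dev=45) == hardlink_id_from_inode(ino=123, dev=45)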

src/borg/helpers/parseformat.py

Lines changed: 1 addition & 1 deletion
@@ -945,7 +945,7 @@ def calculate_num_chunks(self, item):
         return len(item.get("chunks", []))
 
     def calculate_size(self, item):
-        # note: does not support hardlink slaves, they will be size 0
+        # note: does not support hard link slaves, they will be size 0
         return item.get_size()
 
     def hash_item(self, hash_function, item):

src/borg/legacyrepository.py

Lines changed: 1 addition & 1 deletion
@@ -338,7 +338,7 @@ def save_config(self, path, config):
 
         if os.path.isfile(config_path):
             link_error_msg = (
-                "Failed to erase old repository config file securely (hardlinks not supported). "
+                "Failed to erase old repository config file securely (hard links not supported). "
                 "Old repokey data, if any, might persist on physical storage."
             )
             try:

src/borg/testsuite/archiver/__init__.py

Lines changed: 1 addition & 1 deletion
@@ -39,7 +39,7 @@
 src_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..", "archiver"))
 src_file = "archiver/__init__.py"  # relative path of one file in src_dir
 
-requires_hardlinks = pytest.mark.skipif(not are_hardlinks_supported(), reason="hardlinks not supported")
+requires_hardlinks = pytest.mark.skipif(not are_hardlinks_supported(), reason="hard links not supported")
 
 
 def exec_cmd(*args, archiver=None, fork=False, exe=None, input=b"", binary_output=False, **kw):
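
Note: are_hardlinks_supported() is borg's own capability probe; one plausible way such a probe can be written (a hypothetical re-implementation, not borg's actual code):

import os
import tempfile

def hardlinks_supported_sketch() -> bool:
    """Probe whether the filesystem backing the temp dir supports hard links."""
    with tempfile.TemporaryDirectory() as tmp:
        src = os.path.join(tmp, "src")
        dst = os.path.join(tmp, "dst")
        open(src, "w").close()
        try:
            os.link(src, dst)  # raises OSError where hard links are unsupported
        except OSError:
            return False
        return os.stat(src).st_nlink == 2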

src/borg/testsuite/archiver/extract_cmd_test.py

Lines changed: 4 additions & 4 deletions
@@ -46,7 +46,7 @@ def test_symlink_extract(archivers, request):
 
 @pytest.mark.skipif(
     not are_symlinks_supported() or not are_hardlinks_supported() or is_darwin,
-    reason="symlinks or hardlinks or hardlinked symlinks not supported",
+    reason="symbolic links or hard links or hard-linked sym-links not supported",
 )
 def test_hardlinked_symlinks_extract(archivers, request):
     archiver = request.getfixturevalue(archivers)

@@ -323,7 +323,7 @@ def test_extract_hardlinks_twice(archivers, request):
     # if issue #5603 happens, extraction gives rc == 1 (triggering AssertionError) and warnings like:
     # input/a/hardlink: link: [Errno 2] No such file or directory: 'input/a/hardlink' -> 'input/a/hardlink'
     # input/b/hardlink: link: [Errno 2] No such file or directory: 'input/a/hardlink' -> 'input/b/hardlink'
-    # otherwise, when fixed, the hardlinks should be there and have a link count of 2
+    # otherwise, when fixed, the hard links should be there and have a link count of 2
     assert os.stat("input/a/hardlink").st_nlink == 2
     assert os.stat("input/b/hardlink").st_nlink == 2
 

@@ -690,12 +690,12 @@ def test_extract_continue(archivers, request):
     file1_st = os.stat("input/file1")
     # simulate a partially extracted file2 (smaller size, archived mtime not yet set)
     file2_st = os.stat("input/file2")
-    # make a hardlink, so it does not free the inode when unlinking input/file2
+    # make a hard link, so it does not free the inode when unlinking input/file2
     os.link("input/file2", "hardlink-to-keep-inode-f2")
     os.truncate("input/file2", 123)  # -> incorrect size, incorrect mtime
     # simulate file3 has not yet been extracted
     file3_st = os.stat("input/file3")
-    # make a hardlink, so it does not free the inode when unlinking input/file3
+    # make a hard link, so it does not free the inode when unlinking input/file3
     os.link("input/file3", "hardlink-to-keep-inode-f3")
     os.remove("input/file3")
     time.sleep(1)  # needed due to timestamp granularity of apple hfs+
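
Note: the "hardlink-to-keep-inode" trick in test_extract_continue relies on the POSIX link count; a tiny standalone illustration (the temp paths are hypothetical):

import os
import tempfile

with tempfile.TemporaryDirectory() as tmp:
    f2 = os.path.join(tmp, "file2")
    keeper = os.path.join(tmp, "hardlink-to-keep-inode-f2")
    with open(f2, "w") as f:
        f.write("data")
    os.link(f2, keeper)  # a second name for the same inode
    assert os.stat(f2).st_nlink == 2
    os.remove(f2)  # the inode survives: keeper still references it
    assert os.stat(keeper).st_nlink == 1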

src/borg/testsuite/archiver/mount_cmds_test.py

Lines changed: 3 additions & 3 deletions
@@ -113,12 +113,12 @@ def has_noatime(some_file):
     assert same_ts_ns(sti1.st_ctime * 1e9, sto1.st_ctime * 1e9)
     assert same_ts_ns(sti1.st_mtime * 1e9, sto1.st_mtime * 1e9)
     if are_hardlinks_supported():
-        # note: there is another hardlink to this, see below
+        # note: there is another hard link to this, see below
         assert sti1.st_nlink == sto1.st_nlink == 2
     # read
     with open(in_fn, "rb") as in_f, open(out_fn, "rb") as out_f:
         assert in_f.read() == out_f.read()
-    # hardlink (to 'input/file1')
+    # hard link (to 'input/file1')
     if are_hardlinks_supported():
         in_fn = "input/hardlink"
         out_fn = os.path.join(mountpoint, "archive", "input", "hardlink")

@@ -191,7 +191,7 @@ def test_fuse_versions_view(archivers, request):
             hl3 = os.path.join(mountpoint, "input", "hardlink3", "hardlink3.00001")
             assert os.stat(hl1).st_ino == os.stat(hl2).st_ino == os.stat(hl3).st_ino
             assert open(hl3, "rb").read() == b"123456"
-    # similar again, but exclude the 1st hardlink:
+    # similar again, but exclude the 1st hard link:
     with fuse_mount(archiver, mountpoint, "-o", "versions", "-e", "input/hardlink1"):
         if are_hardlinks_supported():
             hl2 = os.path.join(mountpoint, "input", "hardlink2", "hardlink2.00001")
