@@ -240,7 +240,7 @@ def stat_update_check(st_old, st_curr):
         # in this case, we dispatched to wrong handler - abort
         raise BackupRaceConditionError("file type changed (race condition), skipping file")
     if st_old.st_ino != st_curr.st_ino:
-        # in this case, the hardlinks-related code in create_helper has the wrong inode - abort!
+        # in this case, the hard-links-related code in create_helper has the wrong inode - abort!
         raise BackupRaceConditionError("file inode changed (race condition), skipping file")
     # looks ok, we are still dealing with the same thing - return current stat:
     return st_curr
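For context, a minimal sketch of how a caller might use stat_update_check to catch a file being swapped out between the initial stat() and the read. The read_file_race_checked helper and the S_IFMT type comparison are assumptions for illustration, not borg's actual call site:

import os
import stat


class BackupRaceConditionError(Exception):
    pass


def stat_update_check(st_old, st_curr):
    # reconstructed from the hunk above; the S_IFMT comparison is assumed
    if stat.S_IFMT(st_old.st_mode) != stat.S_IFMT(st_curr.st_mode):
        raise BackupRaceConditionError("file type changed (race condition), skipping file")
    if st_old.st_ino != st_curr.st_ino:
        raise BackupRaceConditionError("file inode changed (race condition), skipping file")
    return st_curr


def read_file_race_checked(path):
    # hypothetical caller: stat first, open, then fstat the open fd -
    # if the path was replaced in between, inode or type will differ
    st_old = os.stat(path, follow_symlinks=False)
    fd = os.open(path, os.O_RDONLY)
    try:
        st_curr = stat_update_check(st_old, os.fstat(fd))
        return os.read(fd, st_curr.st_size)
    finally:
        os.close(fd)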
@@ -290,7 +290,7 @@ def preload_item_chunks(self, item, optimize_hardlinks=False):
         """
         Preloads the content data chunks of an item (if any).
         optimize_hardlinks can be set to True if item chunks only need to be preloaded for
-        1st hardlink, but not for any further hardlink to same inode / with same hlid.
+        1st hard link, but not for any further hard link to same inode / with same hlid.
         Returns True if chunks were preloaded.

         Warning: if data chunks are preloaded then all data chunks have to be retrieved,
@@ -305,7 +305,7 @@ def preload_item_chunks(self, item, optimize_hardlinks=False):
                 elif hlid in self.hlids_preloaded:
                     preload_chunks = False
                 else:
-                    # not having the hardlink's chunks already preloaded for other hardlink to same inode
+                    # not having the hard link's chunks already preloaded for other hard link to same inode
                     preload_chunks = True
                     self.hlids_preloaded.add(hlid)
             else:
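The hlids_preloaded bookkeeping above can be illustrated standalone. In this sketch (plan_preloads is a hypothetical name, items are modeled as plain dicts) only the first hard link per hlid triggers a preload:

def plan_preloads(items):
    hlids_preloaded = set()
    for item in items:
        if "chunks" not in item:
            continue  # nothing to preload
        hlid = item.get("hlid")
        if hlid is None or hlid not in hlids_preloaded:
            if hlid is not None:
                hlids_preloaded.add(hlid)
            yield item  # preload this item's chunks
        # else: another hard link to the same inode already triggered the preload


items = [
    {"path": "a", "chunks": ["c1"], "hlid": "i1"},
    {"path": "b", "chunks": ["c1"], "hlid": "i1"},  # 2nd hard link: skipped
    {"path": "c", "chunks": ["c2"]},
]
assert [i["path"] for i in plan_preloads(items)] == ["a", "c"]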
@@ -730,15 +730,15 @@ def extract_helper(self, item, path, hlm, *, dry_run=False):
             link_target = hlm.retrieve(id=item.hlid)
             if link_target is not None and has_link:
                 if not dry_run:
-                    # another hardlink to same inode (same hlid) was extracted previously, just link to it
+                    # another hard link to same inode (same hlid) was extracted previously, just link to it
                     with backup_io("link"):
                         os.link(link_target, path, follow_symlinks=False)
                 hardlink_set = True
         yield hardlink_set
         if not hardlink_set:
             if "hlid" in item and has_link:
-                # Update entry with extracted item path, so that following hardlinks don't extract twice.
-                # We have hardlinking support, so we will hardlink not extract.
+                # Update entry with extracted item path, so that following hard links don't extract twice.
+                # We have hardlinking support, so we will hard link, not extract.
                 hlm.remember(id=item.hlid, info=path)
             else:
                 # Broken platform with no hardlinking support.
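To make the remember/retrieve protocol concrete, here is a minimal stand-in for hlm; the HardLinkMap class and the extract_regular helper are illustrative assumptions, not borg's real implementation. The first extraction of an hlid records its path; later items with the same hlid just os.link() to it:

import os


class HardLinkMap:
    def __init__(self):
        self._map = {}

    def retrieve(self, *, id, default=None):
        return self._map.get(id, default)

    def remember(self, *, id, info):
        self._map[id] = info


def extract_regular(item, path, hlm):
    link_target = hlm.retrieve(id=item.get("hlid"))
    if link_target is not None:
        os.link(link_target, path, follow_symlinks=False)  # 2nd+ hard link
        return
    with open(path, "wb") as f:
        f.write(item["data"])  # 1st occurrence: extract the content
    if "hlid" in item:
        hlm.remember(id=item["hlid"], info=path)  # later hard links point here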
@@ -765,7 +765,7 @@ def extract_item(
         :param dry_run: do not write any data
         :param stdout: write extracted data to stdout
         :param sparse: write sparse files (chunk-granularity, independent of the original being sparse)
-        :param hlm: maps hlid to link_target for extracting subtrees with hardlinks correctly
+        :param hlm: maps hlid to link_target for extracting subtrees with hard links correctly
         :param pi: ProgressIndicatorPercent (or similar) for file extraction progress (in bytes)
         :param continue_extraction: continue a previously interrupted extraction of the same archive
         """
@@ -791,7 +791,7 @@ def same_item(item, st):
         if dry_run or stdout:
             with self.extract_helper(item, "", hlm, dry_run=dry_run or stdout) as hardlink_set:
                 if not hardlink_set:
-                    # it does not really set hardlinks due to dry_run, but we need to behave same
+                    # it does not really set hard links due to dry_run, but we need to behave the same
                     # as non-dry_run concerning fetching preloaded chunks from the pipeline or
                     # it would get stuck.
                     if "chunks" in item:
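The comment about dry_run and preloaded chunks is subtle: the chunks were already queued for fetching, so even a run that writes nothing must still read them, or the pipeline would hand later items the wrong data. A minimal sketch, where drain_item_chunks and the fetch_many stand-in are hypothetical:

import io


def drain_item_chunks(fetch_many, item, dry_run, sink):
    # consume every preloaded chunk; write only on a real run
    if "chunks" in item:
        for data in fetch_many(item["chunks"]):
            if not dry_run:
                sink.write(data)
            # on dry_run the data is fetched and dropped, keeping the
            # preload pipeline aligned with the items that follow


pipeline = iter([b"aa", b"bb"])
fetch_many = lambda ids: (next(pipeline) for _ in ids)
sink = io.BytesIO()
drain_item_chunks(fetch_many, {"chunks": ["c1", "c2"]}, dry_run=True, sink=sink)
assert sink.getvalue() == b""        # nothing was written...
assert next(pipeline, None) is None  # ...but the pipeline was fully drained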
@@ -1248,7 +1248,7 @@ def create_helper(self, path, st, status=None, hardlinkable=True, strip_prefix=N
         hl_chunks = None
         update_map = False
         if hardlinked:
-            status = "h"  # hardlink
+            status = "h"  # hard link
             nothing = object()
             chunks = self.hlm.retrieve(id=(st.st_ino, st.st_dev), default=nothing)
             if chunks is nothing:
@@ -1261,7 +1261,7 @@ def create_helper(self, path, st, status=None, hardlinkable=True, strip_prefix=N
         self.add_item(item, stats=self.stats)
         if update_map:
             # remember the hlid of this fs object and if the item has chunks,
-            # also remember them, so we do not have to re-chunk a hardlink.
+            # also remember them, so we do not have to re-chunk a hard link.
             chunks = item.chunks if "chunks" in item else None
             self.hlm.remember(id=(st.st_ino, st.st_dev), info=chunks)

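The nothing = object() sentinel above deserves a note: None cannot mark "not seen yet" because a chunkless hard-linkable item (e.g. a FIFO) legitimately stores info=None. A small sketch of the assumed semantics, with lookup and hlm_map as hypothetical names:

hlm_map = {}  # (st_ino, st_dev) -> chunk list of the 1st hard link, or None


def lookup(st_ino, st_dev):
    nothing = object()  # unique sentinel: can never collide with stored info
    chunks = hlm_map.get((st_ino, st_dev), nothing)
    if chunks is nothing:
        return None, True   # 1st hard link: chunk the file, then remember it
    return chunks, False    # 2nd+ hard link: reuse chunks, skip re-chunking


hlm_map[(42, 1)] = None  # chunkless item already seen for inode 42 on dev 1
assert lookup(42, 1) == (None, False)  # seen before, even though info is None
assert lookup(99, 1) == (None, True)   # genuinely new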
@@ -1394,13 +1394,13 @@ def process_file(self, *, path, parent_fd, name, st, cache, flags=flags_normal,
                     # this needs to be done early, so that part files also get the patched mode.
                     item.mode = stat.S_IFREG | stat.S_IMODE(item.mode)
                 # we begin processing chunks now.
-                if hl_chunks is not None:  # create_helper gave us chunks from a previous hardlink
+                if hl_chunks is not None:  # create_helper gave us chunks from a previous hard link
                     item.chunks = []
                     for chunk_id, chunk_size in hl_chunks:
                         # process one-by-one, so we will know in item.chunks how far we got
                         chunk_entry = cache.reuse_chunk(chunk_id, chunk_size, self.stats)
                         item.chunks.append(chunk_entry)
-                else:  # normal case, no "2nd+" hardlink
+                else:  # normal case, no "2nd+" hard link
                     if not is_special_file:
                         hashed_path = safe_encode(item.path)  # path as in archive item!
                         started_hashing = time.monotonic()
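For the reuse path, a toy refcounting cache shows what reuse_chunk buys: a 2nd+ hard link only bumps refcounts and never re-reads or re-chunks file data. RefCountingCache and its stats dict are hypothetical stand-ins, not borg's cache:

class RefCountingCache:
    def __init__(self):
        self.refcount = {}

    def reuse_chunk(self, chunk_id, size, stats):
        # the chunk is already stored; just add one more reference to it
        self.refcount[chunk_id] = self.refcount.get(chunk_id, 0) + 1
        stats["deduplicated_size"] = stats.get("deduplicated_size", 0) + size
        return (chunk_id, size)  # same shape as a freshly stored chunk entry


cache, stats = RefCountingCache(), {}
hl_chunks = [(b"id1", 4096), (b"id2", 1024)]
chunks = [cache.reuse_chunk(cid, sz, stats) for cid, sz in hl_chunks]
assert stats["deduplicated_size"] == 5120  # all reused, nothing re-chunked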