 from trinity.protocol.les.peer import LESPeer
 from trinity.utils.datastructures import TaskQueue
 
-
 HeaderRequestingPeer = Union[LESPeer, ETHPeer]
 
 
@@ -49,8 +48,6 @@ class BaseHeaderChainSyncer(BaseService, PeerSubscriber):
     """
     # We'll only sync if we are connected to at least min_peers_to_sync.
    min_peers_to_sync = 1
-    # Post-processing steps can exit out of sync (for example, fast sync) by triggering this token:
-    complete_token = None
     # TODO: Instead of a fixed timeout, we should use a variable one that gets adjusted based on
     # the round-trip times from our download requests.
     _reply_timeout = 60
@@ -64,12 +61,7 @@ def __init__(self,
                  db: AsyncHeaderDB,
                  peer_pool: PeerPool,
                  token: CancelToken = None) -> None:
-        self.complete_token = CancelToken('trinity.sync.common.BaseHeaderChainSyncer.SyncCompleted')
-        if token is None:
-            master_service_token = self.complete_token
-        else:
-            master_service_token = token.chain(self.complete_token)
-        super().__init__(master_service_token)
+        super().__init__(token)
         self.chain = chain
         self.db = db
         self.peer_pool = peer_pool
@@ -146,7 +138,7 @@ async def sync(self, peer: HeaderRequestingPeer) -> None:
         try:
             await self._sync(peer)
         except OperationCancelled as e:
-            self.logger.info("Sync with %s aborted: %s", peer, e)
+            self.logger.info("Sync with %s was shut down: %s", peer, e)
         finally:
             self._syncing = False
 
@@ -199,7 +191,23 @@ async def _sync(self, peer: HeaderRequestingPeer) -> None:
                 break
 
             if not headers:
-                self.logger.info("Got no new headers from %s, aborting sync", peer)
+                if last_received_header is None:
+                    request_parent = head
+                else:
+                    request_parent = last_received_header
+                if head_td < peer.head_td:
+                    # peer claims to have a better header, but didn't return it. Boot peer
+                    # TODO ... also blacklist, because it keeps trying to reconnect
+                    self.logger.warning(
+                        "%s announced difficulty %s, but didn't return any headers after %r@%s",
+                        peer,
+                        peer.head_td,
+                        request_parent,
+                        head_td,
+                    )
+                    await peer.disconnect(DisconnectReason.subprotocol_error)
+                else:
+                    self.logger.info("Got no new headers from %s, aborting sync", peer)
                 break
 
             first = headers[0]
@@ -235,24 +243,17 @@ async def _sync(self, peer: HeaderRequestingPeer) -> None:
                 await peer.disconnect(DisconnectReason.subprotocol_error)
                 break
 
+            for header in headers:
+                head_td += header.difficulty
+
             # Setting the latest header hash for the peer, before queuing header processing tasks
             self._target_header_hash = peer.head_hash
 
-            unrequested_headers = tuple(h for h in headers if h not in self.header_queue)
-            await self.header_queue.add(unrequested_headers)
+            new_headers = tuple(h for h in headers if h not in self.header_queue)
+            await self.header_queue.add(new_headers)
             last_received_header = headers[-1]
             start_at = last_received_header.block_number + 1
 
-        # erase any pending tasks, to restart on next _sync() run
-        try:
-            batch_id, pending_tasks = self.header_queue.get_nowait()
-        except asyncio.QueueFull:
-            # nothing pending, continue
-            pass
-        else:
-            # fully remove pending tasks from queue
-            self.header_queue.complete(batch_id, pending_tasks)
-
     async def _fetch_missing_headers(
             self, peer: HeaderRequestingPeer, start_at: int) -> Tuple[BlockHeader, ...]:
         """Fetch a batch of headers starting at start_at and return the ones we're missing."""
@@ -273,12 +274,15 @@ async def get_missing_tail(self: 'BaseHeaderChainSyncer',
                                    ) -> AsyncGenerator[BlockHeader, None]:
             iter_headers = iter(headers)
             for header in iter_headers:
-                is_missing = not await self.wait(self.db.coro_header_exists(header.hash))
-                if is_missing:
+                if header in self.header_queue:
+                    self.logger.debug("Discarding header that is already queued: %s", header)
+                    continue
+                is_present = await self.wait(self.db.coro_header_exists(header.hash))
+                if is_present:
+                    self.logger.debug("Discarding header that we already have: %s", header)
+                else:
                     yield header
                     break
-                else:
-                    self.logger.debug("Discarding header that we already have: %s", header)
 
             for header in iter_headers:
                 yield header
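The essence of the change in _sync is that head_td is kept in step with the headers queued so far, so the comparison against peer.head_td stays meaningful across loop iterations, and an empty response from a peer that still advertises a higher total difficulty is treated as misbehaviour. A rough standalone sketch of those two pieces, with illustrative names (HeaderStub, accumulate_td, peer_broke_promise) that are not part of the PR or the trinity codebase:

from typing import NamedTuple, Sequence


class HeaderStub(NamedTuple):
    # stand-in for BlockHeader; only the field this sketch needs
    difficulty: int


def accumulate_td(head_td: int, headers: Sequence[HeaderStub]) -> int:
    # mirrors the new loop: every header that gets queued bumps the running total difficulty
    for header in headers:
        head_td += header.difficulty
    return head_td


def peer_broke_promise(head_td: int, announced_td: int) -> bool:
    # mirrors the new empty-batch branch: a peer that announced more total
    # difficulty than it delivered gets disconnected; otherwise the sync just
    # ends because the peer has nothing newer for us
    return head_td < announced_td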