Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
52 commits
Select commit Hold shift + click to select a range
452145d
test: add reorg testing framework
MegaRedHand Sep 22, 2025
547fff8
test: add additional checks
MegaRedHand Sep 23, 2025
89f5548
refactor: split into two files
MegaRedHand Sep 23, 2025
496bfaf
feat: connect peers via P2P
MegaRedHand Sep 23, 2025
e685093
test: add second test
MegaRedHand Sep 23, 2025
05bf17d
fix: handle syncing status
MegaRedHand Sep 23, 2025
db158b4
chore: remove unused import
MegaRedHand Sep 23, 2025
8ed7a6c
feat: improve error log
MegaRedHand Sep 23, 2025
f3fe845
docs: add readme with instructions to run it
MegaRedHand Sep 23, 2025
ffd27bd
fix: improve multi-test runs
MegaRedHand Sep 23, 2025
0741172
ci: run reorg tests in CI
MegaRedHand Sep 23, 2025
df64404
Merge branch 'main' into add-reorg-framework
MegaRedHand Sep 23, 2025
de50cc8
feat: print ethrex version when running tests
MegaRedHand Sep 23, 2025
1726494
chore: comment failing test
MegaRedHand Sep 23, 2025
9c6774d
chore: fix clippy lint
MegaRedHand Sep 24, 2025
032970d
Merge branch 'main' into add-reorg-framework
MegaRedHand Sep 24, 2025
ec108d8
Merge branch 'main' into add-reorg-framework
MegaRedHand Sep 24, 2025
0ea4e9b
Merge branch 'main' into add-reorg-framework
MegaRedHand Sep 25, 2025
3fec4a2
Merge branch 'main' into add-reorg-framework
MegaRedHand Sep 25, 2025
9279bf0
fix: differentiate blocks according to builder
MegaRedHand Sep 25, 2025
e8d65ca
chore: comment failing test again
MegaRedHand Sep 25, 2025
a8e2e22
test(l1): add storage reorg test
MegaRedHand Sep 24, 2025
2a75471
refactor: use one datadir and logs file per test
MegaRedHand Sep 25, 2025
1326359
test(l1): add full-sync smoke test
MegaRedHand Sep 25, 2025
9d63811
Update fork choice in extend chain
gianbelinche Sep 26, 2025
c0da247
Merge branch 'main' into add-storage-slot-reorg-test
MegaRedHand Sep 26, 2025
16d96f9
Merge branch 'add-storage-slot-reorg-test' into test/full-sync-withou…
MegaRedHand Sep 26, 2025
eca6c2d
Add reorg test
gianbelinche Sep 26, 2025
dd1ec4e
Add loop for flaky test
gianbelinche Sep 26, 2025
51e4b97
test: add additional check to test
MegaRedHand Sep 26, 2025
36d57ee
docs: add comment
MegaRedHand Sep 26, 2025
fbaa687
feat: use unique ports across each run
MegaRedHand Sep 26, 2025
1b90a6d
include requesting headers from newToOld
cdiielsi Sep 26, 2025
357fd68
update ci test pattern
cdiielsi Sep 26, 2025
4b8d362
fix pattern on ci
cdiielsi Sep 26, 2025
6f9e843
Merge branch 'main' into solve-reorgs-when-current-head-is-not-in-sid…
cdiielsi Sep 29, 2025
c1a9099
Merge branch 'add-storage-slot-reorg-test' into test/full-sync-reorg
gianbelinche Sep 29, 2025
34fd052
Added timestamp for discerning logs
gianbelinche Sep 29, 2025
a705dc0
Re add failing test
gianbelinche Sep 29, 2025
0c1c33e
Remove comment
gianbelinche Sep 29, 2025
16ae14c
Remove unused
gianbelinche Sep 29, 2025
d7681f3
Add reorg back to base test
gianbelinche Sep 29, 2025
d937460
Add reorg test with ancestor
gianbelinche Sep 29, 2025
f5362b0
Merge branch 'main' into test/full-sync-reorg
gianbelinche Sep 29, 2025
beb6a9b
Merge branch 'main' into solve-reorgs-when-current-head-is-not-in-sid…
cdiielsi Sep 29, 2025
fd976b8
refactor previous solution
cdiielsi Sep 30, 2025
cb010f9
update current_head and run clippy
cdiielsi Sep 30, 2025
80be0b2
drop current_head update since it's not needed
cdiielsi Sep 30, 2025
0a21086
Merge branch 'main' into test/full-sync-reorg
gianbelinche Sep 30, 2025
7164550
Merge branch 'main' into solve-reorgs-when-current-head-is-not-in-sid…
cdiielsi Sep 30, 2025
9443077
Merge branch 'solve-reorgs-when-current-head-is-not-in-sidechain' int…
gianbelinche Sep 30, 2025
a73dfae
Add 1024 reorg test
gianbelinche Oct 1, 2025
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .github/workflows/pr-main_l1.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -191,7 +191,7 @@ jobs:
ethrex_flags: ""
- name: "Engine withdrawal tests"
simulation: ethereum/engine
test_pattern: "engine-withdrawals/Corrupted Block Hash Payload|Empty Withdrawals|engine-withdrawals test loader|GetPayloadBodies|GetPayloadV2 Block Value|Max Initcode Size|Sync after 2 blocks - Withdrawals on Genesis|Withdraw many accounts|Withdraw to a single account|Withdraw to two accounts|Withdraw zero amount|Withdraw many accounts|Withdrawals Fork on Block 1 - 1 Block Re-Org|Withdrawals Fork on Block 1 - 8 Block Re-Org NewPayload|Withdrawals Fork on Block 2|Withdrawals Fork on Block 3|Withdrawals Fork on Block 8 - 10 Block Re-Org NewPayload|Withdrawals Fork on Canonical Block 8 / Side Block 7 - 10 Block Re-Org [^S]|Withdrawals Fork on Canonical Block 8 / Side Block 9 - 10 Block Re-Org [^S]"
test_pattern: "engine-withdrawals/Corrupted Block Hash Payload|Empty Withdrawals|engine-withdrawals test loader|GetPayloadBodies|GetPayloadV2 Block Value|Max Initcode Size|Sync after 2 blocks - Withdrawals on Genesis|Withdraw many accounts|Withdraw to a single account|Withdraw to two accounts|Withdraw zero amount|Withdraw many accounts|Withdrawals Fork on Block 1 - 1 Block Re-Org|Withdrawals Fork on Block 1 - 8 Block Re-Org |Withdrawals Fork on Block 2|Withdrawals Fork on Block 3|Withdrawals Fork on Block 8 - 10 Block Re-Org |Withdrawals Fork on Canonical Block 8 / Side Block 7 - 10 Block Re-Org |Withdrawals Fork on Canonical Block 8 / Side Block 9 - 10 Block Re-Org"
- name: "Sync full"
simulation: ethereum/sync
test_pattern: ""
Expand Down
218 changes: 141 additions & 77 deletions crates/networking/p2p/sync.rs
Original file line number Diff line number Diff line change
Expand Up @@ -336,96 +336,160 @@ impl Syncer {
current_head, sync_head
);

loop {
debug!("Sync Log 1: In Full Sync");
debug!(
"Sync Log 3: State current headers len {}",
block_sync_state.current_headers.len()
);
debug!(
"Sync Log 4: State current blocks len {}",
block_sync_state.current_blocks.len()
);
info!("Sync Log 1: In Full Sync");
debug!(
"Sync Log 3: State current headers len {}",
block_sync_state.current_headers.len()
);
debug!(
"Sync Log 4: State current blocks len {}",
block_sync_state.current_blocks.len()
);

debug!("Requesting Block Headers from {current_head}");
debug!("Requesting Block Headers from NewToOld from sync_head {sync_head}");

let Some(mut block_headers) = self
.peers
.request_block_headers_from_hash(current_head, BlockRequestOrder::OldToNew)
.await
else {
warn!("Sync failed to find target block header, aborting");
debug!("Sync Log 8: Sync failed to find target block header, aborting");
return Ok(());
let requested_header =
if let Some(sync_head_block) = store.get_pending_block(sync_head).await? {
sync_head_block.header.parent_hash
} else {
sync_head
};

debug!("Sync Log 9: Received {} block headers", block_headers.len());
let Some(mut block_headers) = self
.peers
.request_block_headers_from_hash(requested_header, BlockRequestOrder::NewToOld)
.await
else {
// neither sync_head nor sync_head's parent was found
warn!("Sync failed to find target block header, aborting");
debug!("Sync Log 8: Sync failed to find target block header, aborting");
return Ok(());
};

let (first_block_hash, first_block_number, first_block_parent_hash) =
match block_headers.first() {
Some(header) => (header.hash(), header.number, header.parent_hash),
None => continue,
};
let (last_block_hash, last_block_number) = match block_headers.last() {
Some(header) => (header.hash(), header.number),
None => continue,
};
// TODO(#2126): This is just a temporary solution to avoid a bug where the sync would get stuck
// on a loop when the target head is not found, i.e. on a reorg with a side-chain.
if first_block_hash == last_block_hash
&& first_block_hash == current_head
&& current_head != sync_head
debug!("Sync Log 9: Received {} block headers", block_headers.len());

let mut found_common_ancestor = false;
for i in 0..block_headers.len() {
if store
.get_block_by_hash(block_headers[i].hash())
.await?
.is_some()
{
// There is no path to the sync head; this walks back until it finds a common ancestor
warn!("Sync failed to find target block header, going back to the previous parent");
current_head = first_block_parent_hash;
continue;
block_headers.drain(i..);
found_common_ancestor = true;
break;
}
}

debug!(
"Received {} block headers| First Number: {} Last Number: {}",
block_headers.len(),
first_block_number,
last_block_number
);
if found_common_ancestor {
block_headers.reverse();
block_sync_state
.process_incoming_headers(
block_headers,
sync_head,
true, // sync_head_found is true because of the NewToOld headers request
self.blockchain.clone(),
self.peers.clone(),
self.cancel_token.clone(),
)
.await?;
Ok(())
} else {
// If found_common_ancestor is false, we are more than 1024 blocks behind; for now, fall back to the previous (OldToNew) syncing loop below.
// TODO: Have full syncing always be from NewToOld
loop {
info!("Sync Log 1: In Full Sync");
debug!(
"Sync Log 3: State current headers len {}",
block_sync_state.current_headers.len()
);
debug!(
"Sync Log 4: State current blocks len {}",
block_sync_state.current_blocks.len()
);

// Filter out everything after the sync_head
let mut sync_head_found = false;
if let Some(index) = block_headers
.iter()
.position(|header| header.hash() == sync_head)
{
sync_head_found = true;
block_headers.drain(index + 1..);
}
debug!("Requesting Block Headers from OldToNew from current_head {current_head}");

// Update current fetch head
current_head = last_block_hash;
let Some(mut block_headers) = self
.peers
.request_block_headers_from_hash(current_head, BlockRequestOrder::OldToNew)
.await
else {
warn!("Sync failed to find target block header, aborting");
debug!("Sync Log 8: Sync failed to find target block header, aborting");
return Ok(());
};

// Discard the first header as we already have it
block_headers.remove(0);
if !block_headers.is_empty() {
let mut finished = false;
while !finished {
(finished, sync_head_found) = block_sync_state
.process_incoming_headers(
block_headers.clone(),
sync_head,
sync_head_found,
self.blockchain.clone(),
self.peers.clone(),
self.cancel_token.clone(),
)
.await?;
block_headers.clear();
debug!("Sync Log 9: Received {} block headers", block_headers.len());

let (first_block_hash, first_block_number, first_block_parent_hash) =
match block_headers.first() {
Some(header) => (header.hash(), header.number, header.parent_hash),
None => continue,
};
let (last_block_hash, last_block_number) = match block_headers.last() {
Some(header) => (header.hash(), header.number),
None => continue,
};
// TODO(#2126): This is just a temporary solution to avoid a bug where the sync would get stuck
// on a loop when the target head is not found, i.e. on a reorg with a side-chain.
if first_block_hash == last_block_hash
&& first_block_hash == current_head
&& current_head != sync_head
{
// There is no path to the sync head; this walks back until it finds a common ancestor
warn!(
"Sync failed to find target block header, going back to the previous parent"
);
current_head = first_block_parent_hash;
continue;
}
}

if sync_head_found {
break;
};
debug!(
"Received {} block headers| First Number: {} Last Number: {}",
block_headers.len(),
first_block_number,
last_block_number
);

// Filter out everything after the sync_head
let mut sync_head_found = false;
if let Some(index) = block_headers
.iter()
.position(|header| header.hash() == sync_head)
{
sync_head_found = true;
block_headers.drain(index + 1..);
}

// Update current fetch head
current_head = last_block_hash;

// Discard the first header as we already have it
block_headers.remove(0);
if !block_headers.is_empty() {
let mut finished = false;
while !finished {
(finished, sync_head_found) = block_sync_state
.process_incoming_headers(
block_headers.clone(),
sync_head,
sync_head_found,
self.blockchain.clone(),
self.peers.clone(),
self.cancel_token.clone(),
)
.await?;
block_headers.clear();
}
}

if sync_head_found {
break;
};
}
Ok(())
}
Ok(())
}

/// Executes the given blocks and stores them
Expand Down Expand Up @@ -677,7 +741,7 @@ impl FullBlockSyncState {
if let Err((err, batch_failure)) = Syncer::add_blocks(
blockchain.clone(),
block_batch,
sync_head_found,
sync_head_found || finished,
cancel_token.clone(),
)
.await
Expand Down
Loading
Loading