Merge branch 'dev' into dependabot/cargo/tokio-1.38.1

BillyWooo authored Jul 23, 2024
2 parents 6549826 + f789aad commit d8bc91d
Showing 16 changed files with 137 additions and 38 deletions.
1 change: 1 addition & 0 deletions .github/file-filter.yml
@@ -37,6 +37,7 @@ tee_test: &tee_test
- 'tee-worker/cli/*.sh'
- 'docker/**'
- 'tee-worker/docker/*.yml'
- 'tee-worker/litentry/core/assertion-build/src/dynamic/contracts/**'

bitacross_src: &bitacross_src
- 'bitacross-worker/**/*.rs'
2 changes: 1 addition & 1 deletion .github/workflows/benchmark-machine.yml
@@ -24,7 +24,7 @@ jobs:
fetch-depth: 0

- name: Set up AWS CLI
uses: aws-actions/configure-aws-credentials@v2
uses: aws-actions/configure-aws-credentials@v4
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
2 changes: 1 addition & 1 deletion .github/workflows/benchmark-runtime-weights.yml
@@ -112,7 +112,7 @@ jobs:
docker pull litentry/litentry-parachain:runtime-benchmarks
- name: Set up AWS CLI
uses: aws-actions/configure-aws-credentials@v2
uses: aws-actions/configure-aws-credentials@v4
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
11 changes: 5 additions & 6 deletions bitacross-worker/Cargo.lock


@@ -49,6 +49,10 @@ pub trait ChainApi {
from: Self::BlockNumber,
to: Self::BlockNumber,
) -> ApiResult<Vec<GenericSignedBlock<Self::Block>>>;
fn get_block_by_number(
&self,
block: Self::BlockNumber,
) -> ApiResult<Option<GenericSignedBlock<Self::Block>>>;
fn is_grandpa_available(&self) -> ApiResult<bool>;
fn grandpa_authorities(&self, hash: Option<Self::Hash>) -> ApiResult<AuthorityList>;
fn grandpa_authorities_proof(&self, hash: Option<Self::Hash>) -> ApiResult<StorageProof>;
@@ -101,6 +105,16 @@ where
Ok(blocks)
}

fn get_block_by_number(
&self,
block_number: Self::BlockNumber,
) -> ApiResult<Option<GenericSignedBlock<Self::Block>>> {
match self.get_signed_block_by_num(Some(block_number))? {
Some(block) => Ok(Some(block.into())),
None => Ok(None),
}
}

fn is_grandpa_available(&self) -> ApiResult<bool> {
let genesis_hash = Some(self.get_genesis_hash().expect("Failed to get genesis hash"));
Ok(self
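Both workers add a single-block getter to the ChainApi trait. The implementation shown above delegates to the existing get_signed_block_by_num call and converts the result, and the explicit match is behaviourally the same as mapping the inner value with Into::into. A minimal, self-contained sketch of that idiom follows; RawBlock and SignedBlock are stand-in types for illustration, not the repo's GenericSignedBlock.

// Stand-in types for illustration only; the repo converts the api-client's
// signed block into GenericSignedBlock instead.
#[derive(Debug, PartialEq)]
struct RawBlock(u64);

#[derive(Debug, PartialEq)]
struct SignedBlock(u64);

impl From<RawBlock> for SignedBlock {
    fn from(raw: RawBlock) -> Self {
        SignedBlock(raw.0)
    }
}

// Same behaviour as the explicit match in the diff: a found block is
// converted, a missing block is passed through as Ok(None).
fn convert(found: Option<RawBlock>) -> Result<Option<SignedBlock>, ()> {
    Ok(found.map(Into::into))
}

fn main() {
    assert_eq!(convert(Some(RawBlock(42))), Ok(Some(SignedBlock(42))));
    assert_eq!(convert(None), Ok(None));
}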
1 change: 1 addition & 0 deletions bitacross-worker/service/Cargo.toml
@@ -20,6 +20,7 @@ log = "0.4"
parking_lot = "0.12.1"
parse_duration = "2.1.1"
prometheus = { version = "0.13.0", features = ["process"], default-features = false } # Enabling std lead to protobuf dependency conflicts with substrate, and we don't need it.
rayon = "1.10.0"
regex = "1.9.5"
scale-info = { version = "2.10.0", default-features = false, features = ["derive"] }
serde = "1.0"
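Both service crates pull in rayon to parallelise the block fetching in parentchain_handler.rs below. Rayon sizes its global thread pool from the available CPUs by default; if the worker ever needed to cap the parallel RPC fan-out, the pool could be configured once at startup. A hedged sketch, not part of this commit, using rayon's public ThreadPoolBuilder:

use rayon::ThreadPoolBuilder;

fn main() {
    // Cap the global pool at 4 worker threads. build_global() may only be
    // called once per process and returns Err if a pool already exists.
    ThreadPoolBuilder::new()
        .num_threads(4)
        .build_global()
        .expect("rayon global thread pool was already initialised");
}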
42 changes: 32 additions & 10 deletions bitacross-worker/service/src/parentchain_handler.rs
@@ -31,6 +31,7 @@ use itp_storage::StorageProof;
use itp_time_utils::duration_now;
use itp_types::ShardIdentifier;
use log::*;
use rayon::prelude::*;
use sp_consensus_grandpa::VersionedAuthorityList;
use sp_runtime::traits::Header as HeaderTrait;
use std::{cmp::min, sync::Arc, time::Duration};
@@ -204,10 +205,27 @@ where
}

loop {
let block_chunk_to_sync = self.parentchain_api.get_blocks(
start_block,
min(start_block + BLOCK_SYNC_BATCH_SIZE, curr_block_number),
)?;
let chunk_range =
start_block..min(start_block + BLOCK_SYNC_BATCH_SIZE, curr_block_number);

let start_fetch_time = duration_now();

let block_chunk_to_sync = chunk_range
.into_par_iter()
.filter_map(|block_number| {
self.parentchain_api
.get_block_by_number(block_number)
.expect("failed to get block")
})
.collect::<Vec<_>>();

debug!(
"[{:?}] Fetched {} blocks in {}",
id,
block_chunk_to_sync.len(),
format_duration(duration_now().saturating_sub(start_fetch_time))
);

if block_chunk_to_sync.len() == BLOCK_SYNC_BATCH_SIZE as usize {
let now = duration_now();
let total_blocks = curr_block_number.saturating_sub(last_synced_header_number);
@@ -247,7 +265,7 @@ where
vec![]
} else {
let evs = block_chunk_to_sync
.iter()
.par_iter()
.map(|block| {
self.parentchain_api.get_events_for_block(Some(block.block.header.hash()))
})
@@ -260,13 +278,15 @@ where
vec![]
} else {
block_chunk_to_sync
.iter()
.par_iter()
.map(|block| {
self.parentchain_api.get_events_value_proof(Some(block.block.header.hash()))
})
.collect::<Result<Vec<_>, _>>()?
};

let sync_start_time = duration_now();

self.enclave_api.sync_parentchain(
block_chunk_to_sync.as_slice(),
events_chunk_to_sync.as_slice(),
@@ -275,14 +295,16 @@ where
immediate_import,
)?;

info!(
"[{:?}] Synced parentchain batch in {}",
id,
format_duration(duration_now().saturating_sub(sync_start_time))
);

let api_client_until_synced_header = block_chunk_to_sync
.last()
.map(|b| b.block.header.clone())
.ok_or(Error::EmptyChunk)?;
debug!(
"[{:?}] Synced {} out of {} finalized parentchain blocks",
id, api_client_until_synced_header.number, curr_block_number,
);

// #TODO: #1451: fix api/client types
until_synced_header =
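The sync loop used to fetch each chunk with a single ranged get_blocks(from, to) call; it now issues one get_block_by_number request per block number in the chunk and runs them in parallel on rayon's thread pool, logging how long the fetch took. A self-contained sketch of the same into_par_iter().filter_map(..).collect() pattern, with a local fetch function standing in for parentchain_api.get_block_by_number (rayon must be in Cargo.toml, as added above):

use rayon::prelude::*;

// Stand-in for parentchain_api.get_block_by_number: it may fail (Err) or find
// nothing (Ok(None)). As in the diff, an Err panics via expect, while Ok(None)
// is silently dropped by filter_map.
fn fetch(number: u32) -> Result<Option<String>, String> {
    Ok(if number % 2 == 0 { Some(format!("block {number}")) } else { None })
}

fn main() {
    let chunk_range = 0u32..8;

    let block_chunk_to_sync: Vec<String> = chunk_range
        .into_par_iter()
        .filter_map(|number| fetch(number).expect("failed to get block"))
        .collect(); // rayon's collect keeps items in the original range order

    println!("fetched {} blocks in parallel", block_chunk_to_sync.len());
}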
@@ -101,4 +101,11 @@ impl ChainApi for ParentchainApiMock {
fn get_events_for_block(&self, _block_hash: Option<H256>) -> ApiResult<Vec<u8>> {
Ok(Default::default())
}

fn get_block_by_number(
&self,
block: Self::BlockNumber,
) -> ApiResult<Option<itc_parentchain_test::SignedBlock<Self::Block>>> {
Ok(self.parentchain.get(block as usize).cloned())
}
}
11 changes: 5 additions & 6 deletions tee-worker/Cargo.lock


4 changes: 2 additions & 2 deletions tee-worker/cli/lit_test_failed_parentchain_extrinsic.sh
@@ -46,7 +46,7 @@ echo "New Account created: ${FIRST_NEW_ACCOUNT}"
echo "Linking identity to Bob"
OUTPUT=$(${CLIENT} link-identity //Bob did:litentry:substrate:${FIRST_NEW_ACCOUNT} litentry) || { echo "Link identity command failed"; exit 1; }
echo "Finished Linking identity to Bob"
sleep 30
sleep 60

echo "Capturing IDGraph Hash of Bob"
INITIAL_ID_GRAPH_HASH=$(${CLIENT} id-graph-hash did:litentry:substrate:0x8eaf04151687736326c9fea17e25fc5287613693c912909cb226aa4794f26a48) || { echo "Failed to get ID Graph hash"; exit 1; }
@@ -58,7 +58,7 @@ echo "New Account created: ${SECOND_NEW_ACCOUNT}"
echo "Linking new identity to Bob with Eve as delegate signer"
OUTPUT=$(${CLIENT} link-identity //Bob "did:litentry:substrate:${SECOND_NEW_ACCOUNT}" litentry -d //Eve) || { echo "Link identity command failed"; exit 1; }
echo "Finished Linking identity to Bob"
sleep 30
sleep 60

echo "Capturing IDGraph Hash of Bob"
FINAL_ID_GRAPH_HASH=$(${CLIENT} id-graph-hash did:litentry:substrate:0x8eaf04151687736326c9fea17e25fc5287613693c912909cb226aa4794f26a48) || { echo "Failed to get ID Graph hash"; exit 1; }
@@ -49,6 +49,10 @@ pub trait ChainApi {
from: Self::BlockNumber,
to: Self::BlockNumber,
) -> ApiResult<Vec<GenericSignedBlock<Self::Block>>>;
fn get_block_by_number(
&self,
block: Self::BlockNumber,
) -> ApiResult<Option<GenericSignedBlock<Self::Block>>>;
fn is_grandpa_available(&self) -> ApiResult<bool>;
fn grandpa_authorities(&self, hash: Option<Self::Hash>) -> ApiResult<AuthorityList>;
fn grandpa_authorities_proof(&self, hash: Option<Self::Hash>) -> ApiResult<StorageProof>;
@@ -101,6 +105,16 @@ where
Ok(blocks)
}

fn get_block_by_number(
&self,
block_number: Self::BlockNumber,
) -> ApiResult<Option<GenericSignedBlock<Self::Block>>> {
match self.get_signed_block_by_num(Some(block_number))? {
Some(block) => Ok(Some(block.into())),
None => Ok(None),
}
}

fn is_grandpa_available(&self) -> ApiResult<bool> {
let genesis_hash = Some(self.get_genesis_hash().expect("Failed to get genesis hash"));
Ok(self
1 change: 1 addition & 0 deletions tee-worker/service/Cargo.toml
@@ -20,6 +20,7 @@ log = "0.4"
parking_lot = "0.12.1"
parse_duration = "2.1.1"
prometheus = { version = "0.13.0", features = ["process"], default-features = false } # Enabling std lead to protobuf dependency conflicts with substrate, and we don't need it.
rayon = "1.10.0"
regex = "1.9.5"
scale-info = { version = "2.10.0", default-features = false, features = ["derive"] }
serde = "1.0"
42 changes: 32 additions & 10 deletions tee-worker/service/src/parentchain_handler.rs
@@ -31,6 +31,7 @@ use itp_storage::StorageProof;
use itp_time_utils::duration_now;
use itp_types::ShardIdentifier;
use log::*;
use rayon::prelude::*;
use sp_consensus_grandpa::VersionedAuthorityList;
use sp_runtime::traits::Header as HeaderTrait;
use std::{cmp::min, sync::Arc, time::Duration};
@@ -204,10 +205,27 @@ where
}

loop {
let block_chunk_to_sync = self.parentchain_api.get_blocks(
start_block,
min(start_block + BLOCK_SYNC_BATCH_SIZE, curr_block_number),
)?;
let chunk_range =
start_block..min(start_block + BLOCK_SYNC_BATCH_SIZE, curr_block_number);

let start_fetch_time = duration_now();

let block_chunk_to_sync = chunk_range
.into_par_iter()
.filter_map(|block_number| {
self.parentchain_api
.get_block_by_number(block_number)
.expect("failed to get block")
})
.collect::<Vec<_>>();

debug!(
"[{:?}] Fetched {} blocks in {}",
id,
block_chunk_to_sync.len(),
format_duration(duration_now().saturating_sub(start_fetch_time))
);

if block_chunk_to_sync.len() == BLOCK_SYNC_BATCH_SIZE as usize {
let now = duration_now();
let total_blocks = curr_block_number.saturating_sub(last_synced_header_number);
@@ -247,7 +265,7 @@ where
vec![]
} else {
let evs = block_chunk_to_sync
.iter()
.par_iter()
.map(|block| {
self.parentchain_api.get_events_for_block(Some(block.block.header.hash()))
})
@@ -260,13 +278,15 @@ where
vec![]
} else {
block_chunk_to_sync
.iter()
.par_iter()
.map(|block| {
self.parentchain_api.get_events_value_proof(Some(block.block.header.hash()))
})
.collect::<Result<Vec<_>, _>>()?
};

let sync_start_time = duration_now();

self.enclave_api.sync_parentchain(
block_chunk_to_sync.as_slice(),
events_chunk_to_sync.as_slice(),
@@ -275,14 +295,16 @@ where
immediate_import,
)?;

info!(
"[{:?}] Synced parentchain batch in {}",
id,
format_duration(duration_now().saturating_sub(sync_start_time))
);

let api_client_until_synced_header = block_chunk_to_sync
.last()
.map(|b| b.block.header.clone())
.ok_or(Error::EmptyChunk)?;
debug!(
"[{:?}] Synced {} out of {} finalized parentchain blocks",
id, api_client_until_synced_header.number, curr_block_number,
);

// #TODO: #1451: fix api/client types
until_synced_header =
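The handler also times the enclave import now: sync_start_time is taken before sync_parentchain and the elapsed time is logged at info level, mirroring the fetch timing above. A small self-contained sketch of the measure-and-log pattern using std types only; the repo's duration_now() and format_duration helpers play the equivalent roles:

use std::time::{Duration, SystemTime, UNIX_EPOCH};

// Rough stand-in for itp_time_utils::duration_now(): time since the Unix
// epoch, so two samples can be subtracted to get elapsed wall-clock time.
fn duration_now() -> Duration {
    SystemTime::now()
        .duration_since(UNIX_EPOCH)
        .unwrap_or_default()
}

fn main() {
    let sync_start_time = duration_now();

    // ... hand the block and event chunks to the enclave here ...

    let elapsed = duration_now().saturating_sub(sync_start_time);
    println!("Synced parentchain batch in {:?}", elapsed);
}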
7 changes: 7 additions & 0 deletions tee-worker/service/src/tests/mocks/parentchain_api_mock.rs
@@ -101,4 +101,11 @@ impl ChainApi for ParentchainApiMock {
fn get_events_for_block(&self, _block_hash: Option<H256>) -> ApiResult<Vec<u8>> {
Ok(Default::default())
}

fn get_block_by_number(
&self,
block: Self::BlockNumber,
) -> ApiResult<Option<itc_parentchain_test::SignedBlock<Self::Block>>> {
Ok(self.parentchain.get(block as usize).cloned())
}
}
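The mock resolves a block number by indexing its in-memory parentchain vector, so any number at or past the stored range comes back as Ok(None) rather than an error. A self-contained sketch of that lookup behaviour, with a plain slice standing in for the mock's block store:

// A plain slice stands in for ParentchainApiMock.parentchain.
fn get_block_by_number(store: &[&str], block: u32) -> Result<Option<String>, ()> {
    Ok(store.get(block as usize).map(|b| b.to_string()))
}

fn main() {
    let store = ["block-0", "block-1", "block-2"];
    assert_eq!(get_block_by_number(&store, 1), Ok(Some("block-1".to_string())));
    // Past the stored range: no error, just None.
    assert_eq!(get_block_by_number(&store, 50), Ok(None));
}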