Remove unnecessary custom build_network implementation
nazar-pc committed Sep 30, 2024
1 parent c92c33f commit f5e261a
Showing 5 changed files with 36 additions and 382 deletions.
8 changes: 7 additions & 1 deletion — crates/subspace-node/src/commands/run/domain.rs
@@ -324,7 +324,13 @@ pub(super) fn create_domain_configuration(
                     unreachable!("Memory transport not used in CLI; qed")
                 }
             },
-            force_synced: false,
+            // Always set `force_synced` for domains, since they rely on the consensus chain to derive and
+            // import domain blocks. If not set, each domain node would wait until it is fully synced and, as
+            // a result, would not propagate transactions over the network. Ideally the consensus chain's sync
+            // service would answer `is_major_syncing` requests, but that needs upstream changes and some
+            // refactoring, which is not worth the effort right now given that the domain's own block request
+            // and state sync mechanism is planned for the near future. Until then, keep domains force-synced.
+            force_synced: true,
         },
         keystore,
         state_pruning: pruning_params.state_pruning()?,
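The rationale above boils down to one predicate: a node only gossips transactions once it no longer reports a major sync, and `force_synced` short-circuits that check. Below is a minimal, self-contained Rust sketch of that behaviour; `SyncState`, `is_major_syncing`, and `should_propagate_transactions` are hypothetical stand-ins for Substrate's sync oracle, not its actual API.

```rust
use std::sync::atomic::{AtomicBool, Ordering};

/// Hypothetical stand-in for a sync oracle, not Substrate's real type.
struct SyncState {
    force_synced: bool,
    major_syncing: AtomicBool,
}

impl SyncState {
    fn is_major_syncing(&self) -> bool {
        // With `force_synced`, the node always claims to be synced,
        // regardless of its real sync progress.
        !self.force_synced && self.major_syncing.load(Ordering::Relaxed)
    }

    /// Transaction gossip is typically suppressed during a major sync.
    fn should_propagate_transactions(&self) -> bool {
        !self.is_major_syncing()
    }
}

fn main() {
    let domain = SyncState {
        force_synced: true,
        major_syncing: AtomicBool::new(true),
    };
    // Even while still importing blocks, a force-synced domain node
    // keeps propagating transactions.
    assert!(domain.should_propagate_transactions());
}
```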
51 changes: 23 additions & 28 deletions — domains/service/src/domain.rs
@@ -15,7 +15,8 @@ use sc_client_api::{
 };
 use sc_consensus::{BasicQueue, BoxBlockImport};
 use sc_domains::{ExtensionsFactory, RuntimeExecutor};
-use sc_network::{NetworkPeers, NotificationMetrics};
+use sc_network::service::traits::NetworkService;
+use sc_network::{NetworkPeers, NetworkWorker, NotificationMetrics};
 use sc_service::{
     BuildNetworkParams, Configuration as ServiceConfiguration, NetworkStarter, PartialComponents,
     SpawnTasksParams, TFullBackend, TaskManager,
@@ -98,7 +99,7 @@ where
     /// Code executor.
     pub code_executor: Arc<CodeExecutor>,
     /// Network service.
-    pub network_service: Arc<sc_network::NetworkService<Block, <Block as BlockT>::Hash>>,
+    pub network_service: Arc<dyn NetworkService>,
     /// Sync service.
     pub sync_service: Arc<sc_network_sync::SyncingService<Block>>,
     /// RPCHandlers to make RPC queries.
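The field change above swaps the concrete `sc_network::NetworkService<Block, Hash>` handle for the `Arc<dyn NetworkService>` trait object, so the struct no longer names a specific network backend. A minimal sketch of the pattern, with a hypothetical one-method `NetworkService` trait standing in for the much larger one in `sc_network::service::traits`:

```rust
use std::sync::Arc;

// Hypothetical stand-in for the real `NetworkService` trait.
trait NetworkService: Send + Sync {
    fn local_peer_id(&self) -> String;
}

// A concrete backend, playing the role of `NetworkWorker`'s service half.
struct Libp2pNetwork;

impl NetworkService for Libp2pNetwork {
    fn local_peer_id(&self) -> String {
        "12D3KooW...".to_string() // placeholder peer ID
    }
}

// Holding `Arc<dyn NetworkService>` keeps this struct independent of the
// concrete backend type, which is what the field change above achieves.
struct NewFull {
    network_service: Arc<dyn NetworkService>,
}

fn main() {
    let full = NewFull {
        network_service: Arc::new(Libp2pNetwork),
    };
    println!("local peer id: {}", full.network_service.local_peer_id());
}
```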
@@ -368,39 +369,33 @@

         let transaction_pool = params.transaction_pool.clone();
         let mut task_manager = params.task_manager;
-        let net_config = sc_network::config::FullNetworkConfiguration::new(
+        let net_config = sc_network::config::FullNetworkConfiguration::<_, _, NetworkWorker<_, _>>::new(
             &domain_config.network,
             domain_config
                 .prometheus_config
                 .as_ref()
                 .map(|cfg| cfg.registry.clone()),
         );

-        let (
-            network_service,
-            system_rpc_tx,
-            tx_handler_controller,
-            network_starter,
-            sync_service,
-            _block_downloader,
-        ) = crate::build_network(BuildNetworkParams {
-            config: &domain_config,
-            net_config,
-            client: client.clone(),
-            transaction_pool: transaction_pool.clone(),
-            spawn_handle: task_manager.spawn_handle(),
-            import_queue: params.import_queue,
-            // TODO: we might want to re-enable this some day.
-            block_announce_validator_builder: None,
-            warp_sync_config: None,
-            block_relay: None,
-            metrics: NotificationMetrics::new(
-                domain_config
-                    .prometheus_config
-                    .as_ref()
-                    .map(|cfg| &cfg.registry),
-            ),
-        })?;
+        let (network_service, system_rpc_tx, tx_handler_controller, network_starter, sync_service) =
+            sc_service::build_network(BuildNetworkParams {
+                config: &domain_config,
+                net_config,
+                client: client.clone(),
+                transaction_pool: transaction_pool.clone(),
+                spawn_handle: task_manager.spawn_handle(),
+                import_queue: params.import_queue,
+                // TODO: we might want to re-enable this some day.
+                block_announce_validator_builder: None,
+                warp_sync_config: None,
+                block_relay: None,
+                metrics: NotificationMetrics::new(
+                    domain_config
+                        .prometheus_config
+                        .as_ref()
+                        .map(|cfg| &cfg.registry),
+                ),
+            })?;

         let is_authority = domain_config.role.is_authority();
         domain_config.rpc.id_provider = provider.rpc_id();
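With the custom `crate::build_network` gone, the backend choice moves into the `FullNetworkConfiguration::<_, _, NetworkWorker<_, _>>` turbofish: the configuration type is generic over the network backend, and the call site pins it to `NetworkWorker` while leaving the block and hash parameters to inference. A toy sketch of that shape, where all types are hypothetical stand-ins and are spelled out explicitly where the real call relies on inference:

```rust
use std::marker::PhantomData;

// Stand-in for a network backend generic over block and hash types.
struct NetworkWorker<B, H>(PhantomData<(B, H)>);

// Stand-in for a configuration type generic over block, hash, and backend.
struct FullNetworkConfiguration<B, H, N> {
    _marker: PhantomData<(B, H, N)>,
}

impl<B, H, N> FullNetworkConfiguration<B, H, N> {
    fn new() -> Self {
        Self { _marker: PhantomData }
    }
}

fn main() {
    // The real call writes `::<_, _, NetworkWorker<_, _>>` and lets the
    // compiler infer block/hash from the other arguments; here they are
    // given explicitly since nothing else constrains them.
    let _net_config =
        FullNetworkConfiguration::<u64, [u8; 32], NetworkWorker<u64, [u8; 32]>>::new();
}
```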
