Skip to content
This repository was archived by the owner on Nov 15, 2023. It is now read-only.
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
40 changes: 40 additions & 0 deletions .github/workflows/quick-check.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,40 @@
name: Quick check Formatting

on:
  push:
    branches:
      - "*"
  pull_request:
    types: [opened, synchronize, reopened, ready_for_review]

jobs:
  quick_check:
    strategy:
      matrix:
        os: ["ubuntu-latest"]
    runs-on: ${{ matrix.os }}
    steps:
      # Checkout must be the FIRST step: the cache step below hashes
      # Cargo.lock for its key (an empty workspace yields an empty hash,
      # i.e. one shared, wrong cache key), and actions/checkout cleans
      # the workspace, which would discard a previously restored
      # `target` directory.
      - uses: actions/checkout@v2

      - name: Install Rust nightly toolchain
        uses: actions-rs/toolchain@v1
        with:
          profile: minimal
          toolchain: nightly
          override: true
          components: clippy, rustfmt

      - name: Cache Dependencies & Build Outputs
        uses: actions/cache@v2
        with:
          path: |
            ~/.cargo/registry
            ~/.cargo/git
            target
          # runner.os alone suffices: matrix.os is the same value, so
          # including both only duplicated it in the key.
          key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}

      - name: Cargo fmt
        uses: actions-rs/cargo@v1
        with:
          command: fmt
          args: --all -- --check
25 changes: 23 additions & 2 deletions .rustfmt.toml
Original file line number Diff line number Diff line change
@@ -1,2 +1,23 @@
hard_tabs=true
merge_imports=true
# Basic
hard_tabs = true
max_width = 100
use_small_heuristics = "Max"

# Imports
imports_granularity = "Crate"
reorder_imports = true

# Consistency
newline_style = "Unix"

# Misc
binop_separator = "Back"
chain_width = 80
match_arm_blocks = false
match_arm_leading_pipes = "Preserve"
match_block_trailing_comma = true
reorder_impl_items = false
spaces_around_ranges = false
trailing_comma = "Vertical"
trailing_semicolon = false
use_field_init_shorthand = true
21 changes: 8 additions & 13 deletions client/cli/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -54,10 +54,8 @@ impl PurgeChainCmd {
relay_config: sc_service::Configuration,
) -> sc_cli::Result<()> {
let databases = match (self.parachain, self.relaychain) {
(true, true) | (false, false) => vec![
("parachain", para_config.database),
("relaychain", relay_config.database),
],
(true, true) | (false, false) =>
vec![("parachain", para_config.database), ("relaychain", relay_config.database)],
(true, false) => vec![("parachain", para_config.database)],
(false, true) => vec![("relaychain", relay_config.database)],
};
Expand Down Expand Up @@ -86,22 +84,22 @@ impl PurgeChainCmd {
let input = input.trim();

match input.chars().nth(0) {
Some('y') | Some('Y') => {}
Some('y') | Some('Y') => {},
_ => {
println!("Aborted");
return Ok(());
}
return Ok(())
},
}
}

for db_path in &db_paths {
match fs::remove_dir_all(&db_path) {
Ok(_) => {
println!("{:?} removed.", &db_path);
}
},
Err(ref err) if err.kind() == io::ErrorKind::NotFound => {
eprintln!("{:?} did not exist.", &db_path);
}
},
Err(err) => return Err(err.into()),
}
}
Expand Down Expand Up @@ -155,10 +153,7 @@ impl RunCmd {

new_base.validator = self.base.validator || self.collator;

NormalizedRunCmd {
base: new_base,
parachain_id: self.parachain_id,
}
NormalizedRunCmd { base: new_base, parachain_id: self.parachain_id }
}
}

Expand Down
60 changes: 19 additions & 41 deletions client/collator/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -81,12 +81,7 @@ where
) -> Self {
let wait_to_announce = Arc::new(Mutex::new(WaitToAnnounce::new(spawner, announce_block)));

Self {
block_status,
wait_to_announce,
runtime_api,
parachain_consensus,
}
Self { block_status, wait_to_announce, runtime_api, parachain_consensus }
}

/// Checks the status of the given block hash in the Parachain.
Expand All @@ -101,7 +96,7 @@ where
"Skipping candidate production, because block is still queued for import.",
);
false
}
},
Ok(BlockStatus::InChainWithState) => true,
Ok(BlockStatus::InChainPruned) => {
tracing::error!(
Expand All @@ -110,15 +105,15 @@ where
hash,
);
false
}
},
Ok(BlockStatus::KnownBad) => {
tracing::error!(
target: LOG_TARGET,
block_hash = ?hash,
"Block is tagged as known bad and is included in the relay chain! Skipping candidate production!",
);
false
}
},
Ok(BlockStatus::Unknown) => {
if header.number().is_zero() {
tracing::error!(
Expand All @@ -134,7 +129,7 @@ where
);
}
false
}
},
Err(e) => {
tracing::error!(
target: LOG_TARGET,
Expand All @@ -143,7 +138,7 @@ where
"Failed to get block status.",
);
false
}
},
}
}

Expand All @@ -168,8 +163,8 @@ where
error = ?e,
"Failed to collect collation info.",
);
return None;
}
return None
},
};

Some(Collation {
Expand Down Expand Up @@ -202,13 +197,13 @@ where
error = ?e,
"Could not decode the head data."
);
return None;
}
return None
},
};

let last_head_hash = last_head.hash();
if !self.check_block_status(last_head_hash, &last_head) {
return None;
return None
}

tracing::info!(
Expand All @@ -232,8 +227,8 @@ where
Ok(proof) => proof,
Err(e) => {
tracing::error!(target: "cumulus-collator", "Failed to compact proof: {:?}", e);
return None;
}
return None
},
};

// Create the parachain block data for the validators.
Expand All @@ -252,20 +247,11 @@ where

let (result_sender, signed_stmt_recv) = oneshot::channel();

self.wait_to_announce
.lock()
.wait_to_announce(block_hash, signed_stmt_recv);
self.wait_to_announce.lock().wait_to_announce(block_hash, signed_stmt_recv);

tracing::info!(
target: LOG_TARGET,
?block_hash,
"Produced proof-of-validity candidate.",
);
tracing::info!(target: LOG_TARGET, ?block_hash, "Produced proof-of-validity candidate.",);

Some(CollationResult {
collation,
result_sender: Some(result_sender),
})
Some(CollationResult { collation, result_sender: Some(result_sender) })
}
}

Expand Down Expand Up @@ -322,10 +308,7 @@ pub async fn start_collator<Block, RA, BS, Spawner>(
};

overseer_handle
.send_msg(
CollationGenerationMessage::Initialize(config),
"StartCollator",
)
.send_msg(CollationGenerationMessage::Initialize(config), "StartCollator")
.await;

overseer_handle
Expand Down Expand Up @@ -384,10 +367,7 @@ mod tests {
.await
.expect("Imports the block");

Some(ParachainCandidate {
block,
proof: proof.expect("Proof is returned"),
})
Some(ParachainCandidate { block, proof: proof.expect("Proof is returned") })
}
}

Expand Down Expand Up @@ -424,9 +404,7 @@ mod tests {
spawner,
para_id,
key: CollatorPair::generate().0,
parachain_consensus: Box::new(DummyParachainConsensus {
client: client.clone(),
}),
parachain_consensus: Box::new(DummyParachainConsensus { client: client.clone() }),
});
block_on(collator_start);

Expand Down
10 changes: 3 additions & 7 deletions client/consensus/aura/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -215,9 +215,8 @@ where
relay_parent: PHash,
validation_data: &PersistedValidationData,
) -> Option<ParachainCandidate<B>> {
let (inherent_data, inherent_data_providers) = self
.inherent_data(parent.hash(), validation_data, relay_parent)
.await?;
let (inherent_data, inherent_data_providers) =
self.inherent_data(parent.hash(), validation_data, relay_parent).await?;

let info = SlotInfo::new(
inherent_data_providers.slot(),
Expand All @@ -234,10 +233,7 @@ where

let res = self.aura_worker.lock().await.on_slot(info).await?;

Some(ParachainCandidate {
block: res.block,
proof: res.storage_proof,
})
Some(ParachainCandidate { block: res.block, proof: res.storage_proof })
}
}

Expand Down
4 changes: 1 addition & 3 deletions client/consensus/common/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -65,9 +65,7 @@ impl<B: BlockT> ParachainConsensus<B> for Box<dyn ParachainConsensus<B> + Send +
relay_parent: PHash,
validation_data: &PersistedValidationData,
) -> Option<ParachainCandidate<B>> {
(*self)
.produce_candidate(parent, relay_parent, validation_data)
.await
(*self).produce_candidate(parent, relay_parent, validation_data).await
}
}

Expand Down
Loading