Merge #346
346: refactor: replace unwrap with expect r=doitian a=zhangsoledad



Co-authored-by: zhangsoledad <787953403@qq.com>
bors[bot] and zhangsoledad committed Mar 21, 2019
2 parents 155a65d + e68d90a commit 99164a2
Showing 15 changed files with 93 additions and 48 deletions.
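For context on the refactor below, here is a minimal standalone sketch in Rust (not code from this repository; the HashMap merely stands in for the chain store) of what the change buys: `expect` records the assumed invariant in the panic message, while `unwrap` only reports that a `None` was unwrapped.

// Standalone sketch: the expect message documents the assumed invariant.
use std::collections::HashMap;

fn main() {
    let mut store: HashMap<u64, String> = HashMap::new();
    store.insert(1, "0xabc".to_string());

    // With unwrap, a missing entry panics with the generic
    // "called `Option::unwrap()` on a `None` value".
    // let hash = store.get(&1).unwrap();

    // With expect, the panic names the broken assumption instead.
    let hash = store
        .get(&1)
        .expect("block hash stored before lookup");
    println!("{}", hash);
}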
40 changes: 32 additions & 8 deletions chain/src/chain.rs
@@ -316,21 +316,33 @@ impl<CI: ChainIndex + 'static> ChainService<CI> {
) {
if new_tip_number <= current_tip_number {
for bn in new_tip_number..=current_tip_number {
let hash = self.shared.block_hash(bn).unwrap();
let old_block = self.shared.block(&hash).unwrap();
let hash = self
.shared
.block_hash(bn)
.expect("block hash stored before alignment_fork");
let old_block = self
.shared
.block(&hash)
.expect("block data stored before alignment_fork");
fork.detached_blocks.push(old_block);
}
} else {
while index.number > current_tip_number {
if index.unseen {
let ext = self.shared.block_ext(&index.hash).unwrap();
let ext = self
.shared
.block_ext(&index.hash)
.expect("block ext stored before alignment_fork");
if ext.txs_verified.is_none() {
fork.dirty_exts.push(ext)
} else {
index.unseen = false;
}
}
let new_block = self.shared.block(&index.hash).unwrap();
let new_block = self
.shared
.block(&index.hash)
.expect("block data stored before alignment_fork");
index.forward(new_block.header().parent_hash().clone());
fork.attached_blocks.push(new_block);
}
@@ -342,23 +354,35 @@ impl<CI: ChainIndex + 'static> ChainService<CI> {
if index.number == 0 {
break;
}
let detached_hash = self.shared.block_hash(index.number).unwrap();
let detached_hash = self
.shared
.block_hash(index.number)
.expect("detached hash stored before find_fork_until_latest_common");
if detached_hash == index.hash {
break;
}
let detached_blocks = self.shared.block(&detached_hash).unwrap();
let detached_blocks = self
.shared
.block(&detached_hash)
.expect("detached block stored before find_fork_until_latest_common");
fork.detached_blocks.push(detached_blocks);

if index.unseen {
let ext = self.shared.block_ext(&index.hash).unwrap();
let ext = self
.shared
.block_ext(&index.hash)
.expect("block ext stored before find_fork_until_latest_common");
if ext.txs_verified.is_none() {
fork.dirty_exts.push(ext)
} else {
index.unseen = false;
}
}

let attached_block = self.shared.block(&index.hash).unwrap();
let attached_block = self
.shared
.block(&index.hash)
.expect("attached block stored before find_fork_until_latest_common");
index.forward(attached_block.header().parent_hash().clone());
fork.attached_blocks.push(attached_block);
}
4 changes: 2 additions & 2 deletions core/src/header.rs
@@ -65,7 +65,7 @@ pub struct RawHeader

impl RawHeader {
pub fn pow_hash(&self) -> H256 {
blake2b_256(serialize(self).unwrap()).into()
blake2b_256(serialize(self).expect("RawHeader serialize should not fail")).into()
}

pub fn with_seal(self, seal: Seal) -> Header {
@@ -151,7 +151,7 @@ impl Header {
}

pub fn hash(&self) -> H256 {
blake2b_256(serialize(&self).unwrap()).into()
blake2b_256(serialize(&self).expect("Header serialize should not fail")).into()
}

pub fn pow_hash(&self) -> H256 {
13 changes: 9 additions & 4 deletions core/src/script.rs
@@ -116,6 +116,9 @@ type ScriptTuple = (
Vec<Vec<u8>>,
);

const VEC_WRITE_ALL_EXPECT: &str =
"Essentially, Vec::write_all invoke extend_from_slice, should not fail";

impl Script {
pub fn new(
version: u8,
@@ -151,7 +154,9 @@ impl Script {
// TODO: switch to flatbuffer serialization once we
// can do stable serialization using flatbuffer.
if let Some(ref data) = self.reference {
bytes.write_all(data.as_bytes()).unwrap();
bytes
.write_all(data.as_bytes())
.expect(VEC_WRITE_ALL_EXPECT);
}
// A separator is used here to prevent the rare case
// that some binary might contain the exactly
@@ -160,12 +165,12 @@
// the hash. Note this might not solve every problem,
// when flatbuffer change is done, we can leverage flatbuffer
// serialization directly, which will be more reliable.
bytes.write_all(b"|").unwrap();
bytes.write_all(b"|").expect(VEC_WRITE_ALL_EXPECT);
if let Some(ref data) = self.binary {
bytes.write_all(&data).unwrap()
bytes.write_all(&data).expect(VEC_WRITE_ALL_EXPECT)
}
for argument in &self.signed_args {
bytes.write_all(argument).unwrap();
bytes.write_all(argument).expect(VEC_WRITE_ALL_EXPECT);
}
blake2b_256(bytes).into()
}
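The comment in this hunk motivates the `|` separator; a small self-contained sketch (simplified byte fields rather than the crate's `Script` type) shows the concatenation ambiguity it guards against, and why the diff calls flatbuffer serialization the more reliable long-term fix:

use std::io::Write;

// Sketch only: without a separator, ("ab", "c") and ("a", "bc") serialize to
// the same bytes and would therefore hash identically.
fn joined(reference: &[u8], binary: &[u8], separator: bool) -> Vec<u8> {
    let mut bytes = Vec::new();
    bytes.write_all(reference).expect("Vec write_all should not fail");
    if separator {
        bytes.write_all(b"|").expect("Vec write_all should not fail");
    }
    bytes.write_all(binary).expect("Vec write_all should not fail");
    bytes
}

fn main() {
    assert_eq!(joined(b"ab", b"c", false), joined(b"a", b"bc", false));
    assert_ne!(joined(b"ab", b"c", true), joined(b"a", b"bc", true));
    // A "|" inside the binary itself can still collide, which is why the
    // source comment notes the separator does not solve every problem.
}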
4 changes: 2 additions & 2 deletions core/src/transaction.rs
@@ -223,7 +223,7 @@ impl ProposalShortId {
}

pub fn hash(&self) -> H256 {
blake2b_256(serialize(self).unwrap()).into()
blake2b_256(serialize(self).expect("ProposalShortId serialize should not fail")).into()
}

pub fn zero() -> Self {
@@ -257,7 +257,7 @@ impl Transaction {
}

pub fn hash(&self) -> H256 {
blake2b_256(serialize(&self).unwrap()).into()
blake2b_256(serialize(&self).expect("Transaction serialize should not fail")).into()
}

pub fn out_points_iter(&self) -> impl Iterator<Item = &OutPoint> {
2 changes: 1 addition & 1 deletion core/src/uncle.rs
@@ -62,6 +62,6 @@ pub fn uncles_hash(uncles: &[UncleBlock]) -> H256 {
if uncles.is_empty() {
H256::zero()
} else {
blake2b_256(serialize(uncles).unwrap()).into()
blake2b_256(serialize(uncles).expect("Uncle serialize should not fail")).into()
}
}
7 changes: 2 additions & 5 deletions db/src/diskdb.rs
@@ -23,11 +23,8 @@ impl RocksDB {
let cf_options: Vec<&str> = cfnames.iter().map(|n| n as &str).collect();
let db = DB::open_cf(&opts, &config.path, &cf_options).expect("Failed to open rocksdb");

if config.options.is_some() {
let rocksdb_options: Vec<(&str, &str)> = config
.options
.as_ref()
.unwrap()
if let Some(db_opt) = config.options.as_ref() {
let rocksdb_options: Vec<(&str, &str)> = db_opt
.iter()
.map(|(k, v)| (k.as_str(), v.as_str()))
.collect();
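The diskdb.rs hunk folds the `is_some()` check and the later `unwrap()` into a single `if let`; a rough standalone sketch of the same pattern, using a stand-in `Config` type rather than the crate's own:

// Stand-in type for illustration; not the Config from db/src.
struct Config {
    options: Option<Vec<(String, String)>>,
}

fn collect_options(config: &Config) -> Vec<(&str, &str)> {
    // The Option is inspected once; no separate is_some()/unwrap() pair.
    if let Some(db_opt) = config.options.as_ref() {
        db_opt
            .iter()
            .map(|(k, v)| (k.as_str(), v.as_str()))
            .collect()
    } else {
        Vec::new()
    }
}

fn main() {
    let config = Config {
        options: Some(vec![("write_buffer_size".to_string(), "67108864".to_string())]),
    };
    println!("{:?}", collect_options(&config));
}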
6 changes: 3 additions & 3 deletions network/src/network_service.rs
@@ -89,10 +89,10 @@ impl NetworkService {
ckb_event_receiver,
ping_event_receiver,
)
.unwrap();
init_tx.send(()).unwrap();
.expect("Network thread init");
init_tx.send(()).expect("Network init signal send");
// here we use default config
let network_runtime = runtime::Runtime::new().unwrap();
let network_runtime = runtime::Runtime::new().expect("Network tokio runtime init");
match network_runtime.block_on_all(network_future) {
Ok(_) => info!(target: "network", "network service exit"),
Err(err) => panic!("network service exit unexpected {}", err),
4 changes: 2 additions & 2 deletions network/src/protocol.rs
@@ -117,8 +117,8 @@ impl ServiceProtocol for CKBHandler {
.remote_pubkey
.as_ref()
.map(|pubkey| pubkey.peer_id())
.unwrap(),
parsed_version.unwrap(),
.expect("remote_pubkey existence checked"),
parsed_version.expect("parsed_version existence checked"),
)
};
debug!(target: "network", "ckb protocol connected, addr: {}, protocol: {}, version: {}, peer_id: {:?}", session.address, self.id, version, &peer_id);
6 changes: 3 additions & 3 deletions pow/src/cuckoo.rs
@@ -201,11 +201,11 @@ impl Cuckoo {
let next_lower = *from_upper[&cur_edge.0]
.iter()
.find(|v| **v != cur_edge.1)
.unwrap();
.expect("next_lower should be found");
let next_upper = *from_lower[&next_lower]
.iter()
.find(|u| **u != cur_edge.0)
.unwrap();
.expect("next_upper should be found");
cur_edge = (next_upper, next_lower);
cycle_length += 2;

@@ -232,7 +232,7 @@
}
let path_u = Cuckoo::path(&graph, u);
let path_v = Cuckoo::path(&graph, v);
if path_u.last().unwrap() == path_v.last().unwrap() {
if path_u.last().is_some() && (path_u.last() == path_v.last()) {
let common = path_u
.iter()
.rev()
23 changes: 14 additions & 9 deletions protocol/src/lib.rs
@@ -5,8 +5,8 @@ mod convert;
mod protocol_generated;

pub use crate::protocol_generated::ckb::protocol::*;
use byteorder::{ByteOrder, LittleEndian, WriteBytesExt};
use hash::blake2b_256;
use byteorder::{LittleEndian, ReadBytesExt};
use hash::new_blake2b;
use numext_fixed_hash::H256;
use siphasher::sip::SipHasher;
use std::hash::Hasher;
@@ -39,14 +39,19 @@ impl<'a, T: flatbuffers::Follow<'a> + 'a> Iterator for FlatbuffersVectorIterator
pub type ShortTransactionID = [u8; 6];

pub fn short_transaction_id_keys(header_nonce: u64, random_nonce: u64) -> (u64, u64) {
// sha3-256(header nonce + random nonce) in little-endian
let mut bytes = vec![];
bytes.write_u64::<LittleEndian>(header_nonce).unwrap();
bytes.write_u64::<LittleEndian>(random_nonce).unwrap();
let block_header_with_nonce_hash = blake2b_256(bytes);
// blake2b-256(header nonce + random nonce) in little-endian
let mut block_header_with_nonce_hash = [0; 32];
let mut blake2b = new_blake2b();
blake2b.update(&header_nonce.to_le_bytes());
blake2b.update(&random_nonce.to_le_bytes());
blake2b.finalize(&mut block_header_with_nonce_hash);

let key0 = LittleEndian::read_u64(&block_header_with_nonce_hash[0..8]);
let key1 = LittleEndian::read_u64(&block_header_with_nonce_hash[8..16]);
let key0 = (&block_header_with_nonce_hash[0..8])
.read_u64::<LittleEndian>()
.expect("read bound checked, should not fail");
let key1 = (&block_header_with_nonce_hash[8..16])
.read_u64::<LittleEndian>()
.expect("read bound checked, should not fail");

(key0, key1)
}
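A rough sketch of the key-derivation step above, using only the standard library: a fixed array stands in for the blake2b-256 digest of the two nonces, and `u64::from_le_bytes` plays the role of the `ReadBytesExt` calls in the diff.

use std::convert::TryInto;

// Sketch only: `digest` represents blake2b-256(header_nonce || random_nonce).
fn keys_from_digest(digest: &[u8; 32]) -> (u64, u64) {
    // The first 16 bytes of the digest become two little-endian SipHash keys.
    let key0 = u64::from_le_bytes(digest[0..8].try_into().expect("slice is 8 bytes"));
    let key1 = u64::from_le_bytes(digest[8..16].try_into().expect("slice is 8 bytes"));
    (key0, key1)
}

fn main() {
    let digest = [0x11u8; 32];
    let (key0, key1) = keys_from_digest(&digest);
    println!("{:#x} {:#x}", key0, key1);
}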
7 changes: 6 additions & 1 deletion rpc/src/server.rs
@@ -92,7 +92,12 @@ impl RpcServer {
]))
.threads(config.threads.unwrap_or_else(num_cpus::get))
.max_request_body_size(config.max_request_body_size)
.start_http(&config.listen_address.parse().unwrap())
.start_http(
&config
.listen_address
.parse()
.expect("config listen_address parsed"),
)
.expect("Jsonrpc initialize");

RpcServer { server }
2 changes: 1 addition & 1 deletion spec/src/lib.rs
@@ -97,7 +97,7 @@ impl ChainSpec {
pub fn read_from_file<P: AsRef<Path>>(path: P) -> Result<ChainSpec, Box<Error>> {
let config_str = std::fs::read_to_string(path.as_ref())?;
let mut spec: Self = toml::from_str(&config_str)?;
spec.resolve_paths(path.as_ref().parent().unwrap());
spec.resolve_paths(path.as_ref().parent().expect("chain spec path resolve"));
Ok(spec)
}

5 changes: 4 additions & 1 deletion src/cli/miner.rs
@@ -43,7 +43,10 @@ impl Config {
pub fn read_from_file<P: AsRef<Path>>(path: P) -> Result<Config, Box<Error>> {
let config_str = std::fs::read_to_string(path.as_ref())?;
let mut config: Self = toml::from_str(&config_str)?;
config.resolve_paths(path.as_ref().parent().unwrap());
config.resolve_paths(path.as_ref().parent().unwrap_or_else(|| {
eprintln!("Invalid config file path {:?}", path.as_ref());
::std::process::exit(1);
}));
Ok(config)
}
}
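The miner.rs change works because `std::process::exit` returns `!`, so a diverging closure is a valid `unwrap_or_else` fallback and a missing parent directory becomes a clean error message rather than a panic; a standalone sketch of that pattern (the path is only an example):

use std::path::Path;

fn parent_or_exit(path: &Path) -> &Path {
    // Path::parent() is None for paths such as "/" or an empty path.
    path.parent().unwrap_or_else(|| {
        eprintln!("Invalid config file path {:?}", path);
        // `exit` never returns, so this arm type-checks against &Path.
        std::process::exit(1);
    })
}

fn main() {
    let config = Path::new("/etc/ckb/miner.toml"); // example path
    println!("{:?}", parent_or_exit(config));
}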
12 changes: 9 additions & 3 deletions util/build-info/src/lib.rs
@@ -3,9 +3,15 @@ use std::env;
#[macro_export]
macro_rules! get_version {
() => {{
let major = env!("CARGO_PKG_VERSION_MAJOR").parse::<u8>().unwrap();
let minor = env!("CARGO_PKG_VERSION_MINOR").parse::<u8>().unwrap();
let patch = env!("CARGO_PKG_VERSION_PATCH").parse::<u16>().unwrap();
let major = env!("CARGO_PKG_VERSION_MAJOR")
.parse::<u8>()
.expect("CARGO_PKG_VERSION_MAJOR parse success");
let minor = env!("CARGO_PKG_VERSION_MINOR")
.parse::<u8>()
.expect("CARGO_PKG_VERSION_MINOR parse success");
let patch = env!("CARGO_PKG_VERSION_PATCH")
.parse::<u16>()
.expect("CARGO_PKG_VERSION_PATCH parse success");

let host_compiler = $crate::get_channel();
let commit_describe = option_env!("COMMIT_DESCRIBE").map(|s| s.to_string());
6 changes: 3 additions & 3 deletions util/logger/src/lib.rs
@@ -74,7 +74,7 @@ impl Logger {
}
}
})
.unwrap();
.expect("Logger thread init should not fail");

Logger {
sender,
@@ -134,15 +134,15 @@ impl Log for Logger {
}

fn flush(&self) {
let handle = self.handle.lock().take().unwrap();
let handle = self.handle.lock().take().expect("Logger flush only once");
let _ = self.sender.send(Message::Terminate);
let _ = handle.join();
}
}

fn sanitize_color(s: &str) -> String {
lazy_static! {
static ref RE: Regex = Regex::new("\x1b\\[[^m]+m").unwrap();
static ref RE: Regex = Regex::new("\x1b\\[[^m]+m").expect("Regex compile success");
}
RE.replace_all(s, "").to_string()
}
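A standalone usage sketch of the `sanitize_color` helper above (it depends on the `regex` and `lazy_static` crates; the assertion is illustrative, not a test from the crate):

use lazy_static::lazy_static;
use regex::Regex;

fn sanitize_color(s: &str) -> String {
    lazy_static! {
        static ref RE: Regex = Regex::new("\x1b\\[[^m]+m").expect("Regex compile success");
    }
    RE.replace_all(s, "").to_string()
}

fn main() {
    // ANSI color escapes are stripped before a line is written to the log file.
    assert_eq!(sanitize_color("\x1b[31merror\x1b[0m"), "error");
    println!("{}", sanitize_color("\x1b[32mok\x1b[0m"));
}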
