Skip to content

Commit

Permalink
Merge branch 'master' into v0.6-alpha
Browse files Browse the repository at this point in the history
  • Loading branch information
Peilun Li committed Jul 8, 2020
2 parents f85d2f9 + b345ad1 commit 48f19f8
Show file tree
Hide file tree
Showing 18 changed files with 199 additions and 194 deletions.
2 changes: 2 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,8 @@ was caused by a wrong assumption of the uniqueness of the trie proof key.
- Remove address from the Account rlp format; it was unexpectedly
included before.

- Changed RewardInfo struct to add author info.

## Improvements

- Rename local rpc send_transaction with cfx_sendTransaction.
Expand Down
2 changes: 1 addition & 1 deletion blockgen/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -81,7 +81,7 @@ impl Worker {
.spawn(move || {
let sleep_duration = time::Duration::from_millis(100);
let mut problem: Option<ProofOfWorkProblem> = None;
let bg_pow = Arc::new(PowComputer::new(bg_handle.pow_config.test_mode));
let bg_pow = Arc::new(PowComputer::new(bg_handle.pow_config.use_octopus()));

loop {
match *bg_handle.state.read() {
Expand Down
10 changes: 6 additions & 4 deletions blockgen/src/miner/stratum.rs
Original file line number Diff line number Diff line change
Expand Up @@ -198,12 +198,14 @@ impl StratumJobDispatcher {
}

/// Serializes payload for stratum service
fn payload(&self, pow_hash: H256, boundary: U256) -> String {
fn payload(
&self, block_height: u64, pow_hash: H256, boundary: U256,
) -> String {
// For now we simply use pow_hash as the job_id, which is more consistent
// with the convention.
format!(
r#"["0x{:x}", "0x{:x}","0x{:x}"]"#,
pow_hash, pow_hash, boundary
r#"["0x{:x}", "{}", "0x{:x}","0x{:x}"]"#,
pow_hash, block_height, pow_hash, boundary
)
}
}
Expand Down Expand Up @@ -239,7 +241,7 @@ impl NotifyWork for Stratum {

self.dispatcher.set_current_problem(&prob);
self.service.push_work_all(
self.dispatcher.payload(prob.block_hash, prob.boundary)
self.dispatcher.payload(prob.block_height, prob.block_hash, prob.boundary)
).unwrap_or_else(
|e| warn!(target: "stratum", "Error while pushing work: {:?}", e)
);
Expand Down
2 changes: 2 additions & 0 deletions blockgen/stratum/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -122,6 +122,8 @@ impl Stratum {

impl PushWorkHandler for Stratum {
/// Broadcasts the serialized job `payload` to every connected stratum
/// miner through the TCP dispatcher, returning any dispatch error.
fn push_work_all(&self, payload: String) -> Result<(), Error> {
    // NOTE(review): `warn!` for a routine job push looks like leftover
    // debug logging — consider `debug!`/`trace!`; confirm intent.
    warn!("Pushing job {} to miners", payload);

    let dispatcher = &self.tcp_dispatcher;
    self.implementation.push_work_all(payload, dispatcher)
}
Expand Down
2 changes: 1 addition & 1 deletion client/src/common/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -202,7 +202,7 @@ pub fn initialize_common_modules(
debug!("Initialize genesis_block={:?}", genesis_block);

let pow_config = conf.pow_config();
let pow = Arc::new(PowComputer::new(pow_config.test_mode));
let pow = Arc::new(PowComputer::new(pow_config.use_octopus()));

let data_man = Arc::new(BlockDataManager::new(
cache_config,
Expand Down
2 changes: 2 additions & 0 deletions client/src/configuration.rs
Original file line number Diff line number Diff line change
Expand Up @@ -129,6 +129,7 @@ build_config! {
(stratum_port, (u16), 32525)
(stratum_secret, (Option<String>), None)
(use_stratum, (bool), false)
(use_octopus_in_test_mode, (bool), false)

// Network section.
(jsonrpc_local_tcp_port, (Option<u16>), None)
Expand Down Expand Up @@ -456,6 +457,7 @@ impl Configuration {

ProofOfWorkConfig::new(
self.is_test_or_dev_mode(),
self.raw_conf.use_octopus_in_test_mode,
self.raw_conf.use_stratum,
self.raw_conf.initial_difficulty,
self.raw_conf.stratum_listen_address.clone(),
Expand Down
14 changes: 11 additions & 3 deletions client/src/rpc/impls/cfx.rs
Original file line number Diff line number Diff line change
Expand Up @@ -472,7 +472,7 @@ impl RpcImpl {
let (
BlockExecutionResultWithEpoch(epoch_hash, execution_result),
address,
state_root,
maybe_state_root,
) = match maybe_results {
None => return Ok(None),
Some(result_tuple) => result_tuple,
Expand Down Expand Up @@ -529,7 +529,7 @@ impl RpcImpl {
address,
prior_gas_used,
Some(epoch_number),
Some(state_root),
maybe_state_root,
);
Ok(Some(rpc_receipt))
}
Expand Down Expand Up @@ -753,7 +753,15 @@ impl RpcImpl {
.get_data_manager()
.block_reward_result_by_hash(&b)
{
ret.push(RpcRewardInfo::new(b, reward_result));
if let Some(block_header) =
self.consensus.get_data_manager().block_header_by_hash(&b)
{
ret.push(RpcRewardInfo::new(
b,
block_header.author().clone(),
reward_result,
));
}
}
}
Ok(ret)
Expand Down
10 changes: 7 additions & 3 deletions client/src/rpc/types/reward_info.rs
Original file line number Diff line number Diff line change
@@ -1,20 +1,24 @@
use crate::rpc::types::{H256, U256};
use cfx_types::H256 as CfxH256;
use crate::rpc::types::{H160, H256, U256};
use cfx_types::{H160 as CfxH160, H256 as CfxH256};
use cfxcore::block_data_manager::BlockRewardResult;

/// Per-block reward information exposed over RPC, serialized with
/// camelCase field names.
#[derive(Debug, Serialize, Clone, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct RewardInfo {
// Hash of the rewarded block.
block_hash: H256,
// Address of the block's author (the miner credited with the reward).
author: H160,
// Presumably base_reward + tx_fee — TODO confirm against the producer
// of `BlockRewardResult`.
total_reward: U256,
// Base (block) portion of the reward.
base_reward: U256,
// Transaction-fee portion of the reward.
tx_fee: U256,
}

impl RewardInfo {
pub fn new(block_hash: CfxH256, reward_result: BlockRewardResult) -> Self {
pub fn new(
block_hash: CfxH256, author: CfxH160, reward_result: BlockRewardResult,
) -> Self {
RewardInfo {
block_hash: block_hash.into(),
author: author.into(),
total_reward: reward_result.total_reward.into(),
base_reward: reward_result.base_reward.into(),
tx_fee: reward_result.tx_fee.into(),
Expand Down
22 changes: 14 additions & 8 deletions core/src/consensus/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -680,7 +680,11 @@ impl ConsensusGraph {

pub fn get_transaction_receipt_and_block_info(
&self, tx_hash: &H256,
) -> Option<(BlockExecutionResultWithEpoch, TransactionIndex, H256)> {
) -> Option<(
BlockExecutionResultWithEpoch,
TransactionIndex,
Option<H256>,
)> {
// Note: `transaction_index_by_hash` might return outdated results if
// there was a pivot chain reorg but the tx was not re-executed yet. In
// this case, `block_execution_results_by_hash` will detect that the
Expand All @@ -701,22 +705,24 @@ impl ConsensusGraph {
)
};
let epoch_hash = results_with_epoch.0;
match self.executor.wait_for_result(epoch_hash) {
let maybe_state_root = match self.executor.wait_for_result(epoch_hash) {
Ok(execution_commitment) => {
// We already have the transaction address with epoch_hash executed,
// so we can always get the state_root with
// `wait_for_result`
let state_root = execution_commitment
.state_root_with_aux_info
.aux_info
.state_root_hash;
Some((results_with_epoch, address, state_root))
Some(
execution_commitment
.state_root_with_aux_info
.aux_info
.state_root_hash,
)
}
Err(msg) => {
warn!("get_transaction_receipt_and_block_info() gets the following error from ConsensusExecutor: {}", msg);
None
}
}
};
Some((results_with_epoch, address, maybe_state_root))
}

pub fn next_nonce(
Expand Down
53 changes: 46 additions & 7 deletions core/src/pow/compute.rs
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,8 @@ use std::{mem, sync::Arc};
const MIX_WORDS: usize = POW_MIX_BYTES / 4;
const MIX_NODES: usize = MIX_WORDS / NODE_WORDS;
pub const FNV_PRIME: u32 = 0x01000193;
const MOD: u32 = 1000000000 + 7;
const MOD64: u64 = MOD as u64;

pub struct Light {
block_height: u64,
Expand Down Expand Up @@ -75,6 +77,15 @@ pub fn light_compute(light: &Light, header_hash: &H256, nonce: u64) -> H256 {
hash_compute(light, full_size, header_hash, nonce)
}

/// Interprets a 4-byte slice as a little-endian `u32`.
///
/// # Panics
///
/// Panics if `bytes` is not exactly 4 bytes long.
fn as_u32_le(bytes: &[u8]) -> u32 {
    assert!(bytes.len() == 4);

    // Delegate to the standard library instead of hand-rolled byte
    // shifting (the original also had a clippy-flagged `<< 0` identity
    // op). `try_into` re-checks the length, so the conversion cannot
    // silently misread a short slice.
    u32::from_le_bytes(bytes.try_into().expect("length checked above"))
}

fn hash_compute(
light: &Light, full_size: usize, header_hash: &H256, nonce: u64,
) -> H256 {
Expand All @@ -100,6 +111,7 @@ fn hash_compute(
struct MixBuf {
half_mix: Node,
compress_bytes: [u8; MIX_WORDS],
magic_mix: u32,
};

if full_size % MIX_WORDS != 0 {
Expand Down Expand Up @@ -135,6 +147,7 @@ fn hash_compute(
Node { bytes: out }
},
compress_bytes: [0u8; MIX_WORDS],
magic_mix: 0,
};

let mut mix: [_; MIX_NODES] = [buf.half_mix.clone(), buf.half_mix.clone()];
Expand All @@ -145,6 +158,12 @@ fn hash_compute(
let cache: &[Node] = light.cache.as_ref();
let first_val = buf.half_mix.as_words()[0];

let magic_b0 = as_u32_le(&header_hash[0..4]);
let magic_b1 = as_u32_le(&header_hash[4..8]);
let magic_b2 = as_u32_le(&header_hash[8..12]);
let magic_w = as_u32_le(&header_hash[12..16]);
let mut magic_c: [u32; POW_ACCESSES] = [0; POW_ACCESSES];

debug_assert_eq!(MIX_NODES, 2);
debug_assert_eq!(NODE_WORDS, 16);

Expand All @@ -170,6 +189,7 @@ fn hash_compute(
mix[n].as_words_mut().iter_mut().zip(tmp_node.as_words())
{
*a = fnv_hash(*a, *b);
magic_c[i as usize] = magic_c[i as usize] ^ *a;
}
}
}
Expand All @@ -184,12 +204,6 @@ fn hash_compute(
let compress: &mut [u32; MIX_WORDS / 4] = unsafe {
make_const_array!(MIX_WORDS / 4, &mut buf.compress_bytes)
};
#[cfg(target_endian = "big")]
{
compile_error!(
"OpenEthereum currently only supports little-endian targets"
);
}

// Compress mix
debug_assert_eq!(MIX_WORDS / 4, 8);
Expand All @@ -204,6 +218,31 @@ fn hash_compute(
}
}

let mut magic_mix: [u32; POW_ACCESSES] = [0; POW_ACCESSES];

for i in 0..POW_ACCESSES as usize {
let mut p: u64 = (magic_b2 as u64) % MOD64;
let mut q: u64 = (magic_b1 as u64) % MOD64;
for _ in 0..i as usize {
p = ((p * (magic_w as u64)) % MOD64 * (magic_w as u64)) % MOD64;
q = (q * (magic_w as u64)) % MOD64;
}
// println!("p={}, q={}", p, q);
let x = ((p + q + (magic_b0 as u64)) % MOD64) as u32;
let mut power = 1u64;
for k in 0..POW_ACCESSES as usize {
let term = ((power * (magic_c[k] as u64)) % MOD64) as u32;
power = (power * (x as u64)) % MOD64;
magic_mix[i] = (magic_mix[i] + term) % MOD;
}
}

let mut reduction: u32 = 0;
for i in 0..POW_ACCESSES as usize {
reduction = reduction.wrapping_mul(FNV_PRIME) ^ magic_mix[i];
}
buf.magic_mix = reduction;

let _mix_hash = buf.compress_bytes;

let value: H256 = {
Expand All @@ -213,7 +252,7 @@ fn hash_compute(
let buffer = unsafe {
core::slice::from_raw_parts(
read_ptr,
buf.half_mix.bytes.len() + buf.compress_bytes.len(),
buf.half_mix.bytes.len() + buf.compress_bytes.len() + 4,
)
};
// We overwrite the buf.compress_bytes since `keccak_256` has an
Expand Down
Loading

0 comments on commit 48f19f8

Please sign in to comment.