Consume input file in streaming mode
parent aa9ec80ece
commit 6ffe4a48eb
@@ -84,7 +84,6 @@ pub fn brainwallet_sha256_check_btc(
     hashing_rounds: usize,
     hasher_repetition_count: usize,
 ) {
-
     if hashing_rounds == 0 {
         panic!("Invalid number of hashing rounds");
     }
@@ -105,15 +104,17 @@ pub fn brainwallet_sha256_check_btc(
     // minor performance tuning: use larger buffer size then default 8KiB
     const BUF_READER_CAPACITY: usize = 1_048_576; // 1MiB
 
+    let source_id = "brainwallet-sha256";
     // silently drop any problematic lines
     // TODO replace with a mode that flags problematic inputs and keeps statistics on them
-    let passphrases: Vec<String> = BufReader::with_capacity(BUF_READER_CAPACITY, file)
+    //
+    // This distributes inputs on the fly to the parallel runners,
+    // avoiding the need to load the initial input file completely into memory first
+    BufReader::with_capacity(BUF_READER_CAPACITY, file)
         .lines()
+        .par_bridge()
         .filter_map(|l| l.ok())
-        .collect();
-    let source_id = "brainwallet-sha256";
-
-    passphrases.par_iter().for_each(|passphrase| {
+        .for_each(|passphrase| {
         // Count lines and print progress
         c.count_and_print_regularly();
 
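For context, here is a minimal, self-contained sketch of the streaming pattern this commit introduces, assuming rayon's ParallelBridge/ParallelIterator traits are in scope; the input path and the per-line body are placeholders, not the project's actual code. par_bridge() hands lines to the worker threads as they are read and does not preserve input order, which is fine when each passphrase is checked independently.

use rayon::iter::{ParallelBridge, ParallelIterator};
use std::fs::File;
use std::io::{BufRead, BufReader};

// 1 MiB read buffer, matching the constant in the commit
const BUF_READER_CAPACITY: usize = 1_048_576;

fn main() -> std::io::Result<()> {
    // hypothetical input file; stands in for the tool's real input handling
    let file = File::open("passphrases.txt")?;

    BufReader::with_capacity(BUF_READER_CAPACITY, file)
        .lines()
        // distribute lines to rayon's thread pool as they are read,
        // instead of collecting the whole file into a Vec first
        .par_bridge()
        // silently drop unreadable lines, as in the commit
        .filter_map(|l| l.ok())
        .for_each(|passphrase| {
            // per-line work (hashing and checking) would go here
            let _ = passphrase.len();
        });

    Ok(())
}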