Consume input file in streaming mode

Christian Reitter 2025-08-20 13:53:56 +02:00
parent aa9ec80ece
commit 6ffe4a48eb
1 changed file with 7 additions and 6 deletions


@@ -84,7 +84,6 @@ pub fn brainwallet_sha256_check_btc(
     hashing_rounds: usize,
     hasher_repetition_count: usize,
 ) {
     if hashing_rounds == 0 {
         panic!("Invalid number of hashing rounds");
     }
@@ -105,15 +104,17 @@ pub fn brainwallet_sha256_check_btc(
     // minor performance tuning: use larger buffer size then default 8KiB
     const BUF_READER_CAPACITY: usize = 1_048_576; // 1MiB
+    let source_id = "brainwallet-sha256";
     // silently drop any problematic lines
     // TODO replace with a mode that flags problematic inputs and keeps statistics on them
-    let passphrases: Vec<String> = BufReader::with_capacity(BUF_READER_CAPACITY, file)
+    //
+    // This distributes inputs on the fly to the parallel runners,
+    // avoiding the need to load the initial input file completely into memory first
+    BufReader::with_capacity(BUF_READER_CAPACITY, file)
         .lines()
+        .par_bridge()
         .filter_map(|l| l.ok())
-        .collect();
-    let source_id = "brainwallet-sha256";
-    passphrases.par_iter().for_each(|passphrase| {
+        .for_each(|passphrase| {
         // Count lines and print progress
         c.count_and_print_regularly();
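
For readers unfamiliar with the pattern, here is a minimal, self-contained sketch of what the new code path does, assuming the parallel iterators come from the rayon crate (the function name check_passphrases_streaming, the path argument, and the placeholder loop body are illustrative only, not this repository's actual API):

// Sketch only: streaming passphrase candidates through rayon's thread pool
// instead of collecting the whole input file into a Vec<String> first.
use rayon::prelude::*;
use std::fs::File;
use std::io::{BufRead, BufReader};

// Larger than the 8 KiB BufReader default, matching the diff above.
const BUF_READER_CAPACITY: usize = 1_048_576; // 1 MiB

fn check_passphrases_streaming(path: &str) -> std::io::Result<()> {
    let file = File::open(path)?;
    BufReader::with_capacity(BUF_READER_CAPACITY, file)
        .lines()
        // Bridge the sequential line iterator into a parallel iterator;
        // lines are handed to rayon worker threads as they are read.
        .par_bridge()
        // Silently drop unreadable (e.g. non-UTF-8) lines, as in the diff.
        .filter_map(|l| l.ok())
        .for_each(|passphrase| {
            // Placeholder for the real per-passphrase work
            // (SHA-256 rounds, key/address derivation, lookup, ...).
            let _ = passphrase;
        });
    Ok(())
}

Note on the trade-off: par_bridge() does not preserve input order and still pulls lines from the reader one at a time, but since each passphrase triggers comparatively expensive hashing work, the reader is unlikely to become the bottleneck, and peak memory stays bounded by the 1 MiB buffer plus the lines currently in flight rather than the size of the full wordlist.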